-rw-r--r--  .gitignore | 10
-rw-r--r--  kamon-akka-remote/src/main/resources/META-INF/aop.xml | 2
-rw-r--r--  kamon-akka-remote/src/main/resources/reference.conf | 14
-rw-r--r--  kamon-akka-remote/src/main/scala/kamon/akka/instrumentation/RemotingInstrumentation.scala (renamed from kamon-akka-remote/src/main/scala/kamon/instrumentation/akka/RemotingInstrumentation.scala) | 31
-rw-r--r--  kamon-akka-remote/src/test/resources/logback.xml | 17
-rw-r--r--  kamon-akka-remote/src/test/scala/kamon/akka/instrumentation/RemotingInstrumentationSpec.scala (renamed from kamon-akka-remote/src/test/scala/kamon/instrumentation/akka/RemotingInstrumentationSpec.scala) | 60
-rw-r--r--  kamon-akka/src/main/resources/META-INF/aop.xml | 34
-rw-r--r--  kamon-akka/src/main/resources/reference.conf | 30
-rw-r--r--  kamon-akka/src/main/scala/kamon/akka/ActorMetrics.scala | 41
-rw-r--r--  kamon-akka/src/main/scala/kamon/akka/AkkaExtension.scala | 34
-rw-r--r--  kamon-akka/src/main/scala/kamon/akka/DispatcherMetrics.scala | 86
-rw-r--r--  kamon-akka/src/main/scala/kamon/akka/RouterMetrics.scala | 40
-rw-r--r--  kamon-akka/src/main/scala/kamon/akka/instrumentation/ActorCellInstrumentation.scala | 212
-rw-r--r--  kamon-akka/src/main/scala/kamon/akka/instrumentation/ActorLoggingInstrumentation.scala (renamed from kamon-core/src/main/scala/kamon/instrumentation/akka/ActorLoggingInstrumentation.scala) | 13
-rw-r--r--  kamon-akka/src/main/scala/kamon/akka/instrumentation/ActorSystemMessageInstrumentation.scala (renamed from kamon-core/src/main/scala/kamon/instrumentation/akka/ActorSystemMessageInstrumentation.scala) | 8
-rw-r--r--  kamon-akka/src/main/scala/kamon/akka/instrumentation/AskPatternInstrumentation.scala | 92
-rw-r--r--  kamon-akka/src/main/scala/kamon/akka/instrumentation/DispatcherInstrumentation.scala | 169
-rw-r--r--  kamon-akka/src/test/scala/kamon/akka/ActorMetricsSpec.scala | 215
-rw-r--r--  kamon-akka/src/test/scala/kamon/akka/DispatcherMetricsSpec.scala | 207
-rw-r--r--  kamon-akka/src/test/scala/kamon/akka/RouterMetricsSpec.scala | 184
-rw-r--r--  kamon-akka/src/test/scala/kamon/akka/instrumentation/ActorCellInstrumentationSpec.scala (renamed from kamon-core/src/test/scala/kamon/instrumentation/akka/ActorCellInstrumentationSpec.scala) | 50
-rw-r--r--  kamon-akka/src/test/scala/kamon/akka/instrumentation/ActorLoggingInstrumentationSpec.scala | 74
-rw-r--r--  kamon-akka/src/test/scala/kamon/akka/instrumentation/ActorSystemMessageInstrumentationSpec.scala (renamed from kamon-core/src/test/scala/kamon/instrumentation/akka/ActorSystemMessageInstrumentationSpec.scala) | 78
-rw-r--r--  kamon-akka/src/test/scala/kamon/akka/instrumentation/AskPatternInstrumentationSpec.scala | 137
-rw-r--r--  kamon-core/src/main/java/kamon/util/Example.java | 8
-rw-r--r--  kamon-core/src/main/java/kamon/util/GlobPathFilter.java | 96
-rw-r--r--  kamon-core/src/main/protobuf/TraceContextAwareWireFormats.proto | 31
-rw-r--r--  kamon-core/src/main/protobuf/WireFormats.proto | 132
-rw-r--r--  kamon-core/src/main/resources/META-INF/aop.xml | 31
-rw-r--r--  kamon-core/src/main/resources/reference.conf | 206
-rw-r--r--  kamon-core/src/main/scala/kamon/Kamon.scala | 68
-rw-r--r--  kamon-core/src/main/scala/kamon/http/HttpServerMetrics.scala | 124
-rw-r--r--  kamon-core/src/main/scala/kamon/instrumentation/AspectJWeaverMissingWarning.scala | 16
-rw-r--r--  kamon-core/src/main/scala/kamon/instrumentation/akka/ActorCellInstrumentation.scala | 176
-rw-r--r--  kamon-core/src/main/scala/kamon/instrumentation/akka/AskPatternInstrumentation.scala | 55
-rw-r--r--  kamon-core/src/main/scala/kamon/instrumentation/akka/DispatcherInstrumentation.scala | 163
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/ActorMetrics.scala | 93
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/DispatcherMetrics.scala | 93
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/Entity.scala | 58
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/EntityRecorder.scala | 173
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/EntitySnapshot.scala | 63
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/MetricKey.scala | 169
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/MetricsExtension.scala | 199
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/MetricsExtensionSettings.scala | 117
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/RouterMetrics.scala | 82
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/Subscriptions.scala | 173
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/SubscriptionsDispatcher.scala | 116
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/TickMetricSnapshotBuffer.scala | 65
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/TraceMetrics.scala | 74
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/UserMetrics.scala | 293
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/instrument/AtomicHistogramFieldsAccessor.scala (renamed from kamon-core/src/main/scala/kamon/instrumentation/hdrhistogram/AtomicHistogramFieldsAccessor.scala) | 0
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/instrument/Counter.scala | 15
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/instrument/Gauge.scala | 118
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/instrument/Histogram.scala | 164
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/instrument/Instrument.scala (renamed from kamon-core/src/main/scala/kamon/metric/EntityMetrics.scala) | 62
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/instrument/InstrumentFactory.scala | 51
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/instrument/InstrumentSettings.scala | 65
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/instrument/MinMaxCounter.scala | 29
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/instrument/RefreshScheduler.scala | 99
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/instrument/UnitOfMeasurement.scala | 71
-rw-r--r--  kamon-core/src/main/scala/kamon/standalone/KamonStandalone.scala | 61
-rw-r--r--  kamon-core/src/main/scala/kamon/supervisor/AspectJPresent.scala (renamed from kamon-core/src/main/scala/kamon/metric/Scale.scala) | 24
-rw-r--r--  kamon-core/src/main/scala/kamon/supervisor/ModuleSupervisorExtension.scala | 125
-rw-r--r--  kamon-core/src/main/scala/kamon/supervisor/ModuleSupervisorSettings.scala | 49
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/Incubator.scala | 97
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/MetricsOnlyContext.scala | 120
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/Sampler.scala | 73
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/TraceContext.scala | 169
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/TraceExtension.scala | 36
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/TraceLocal.scala | 34
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/TraceRecorder.scala | 92
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/TraceSubscriptions.scala | 45
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/TracerExtension.scala | 110
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/TracerExtensionSettings.scala | 46
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/TracingContext.scala | 92
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/logging/LogbackTraceTokenConverter.scala | 4
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/logging/MdcKeysSupport.scala | 39
-rw-r--r--  kamon-core/src/main/scala/kamon/util/ConfigTools.scala | 42
-rw-r--r--  kamon-core/src/main/scala/kamon/util/FastDispatch.scala | 38
-rw-r--r--  kamon-core/src/main/scala/kamon/util/LazyActorRef.scala | 53
-rw-r--r--  kamon-core/src/main/scala/kamon/util/MapMerge.scala | 43
-rw-r--r--  kamon-core/src/main/scala/kamon/util/PaddedAtomicLong.scala | 3
-rw-r--r--  kamon-core/src/main/scala/kamon/util/Sequencer.scala | 40
-rw-r--r--  kamon-core/src/main/scala/kamon/util/Timestamp.scala | 101
-rw-r--r--  kamon-core/src/main/scala/kamon/util/TriemapAtomicGetOrElseUpdate.scala | 45
-rw-r--r--  kamon-core/src/test/resources/logback.xml | 21
-rw-r--r--  kamon-core/src/test/scala/kamon/instrumentation/akka/ActorLoggingInstrumentationSpec.scala | 52
-rw-r--r--  kamon-core/src/test/scala/kamon/instrumentation/akka/AskPatternInstrumentationSpec.scala | 66
-rw-r--r--  kamon-core/src/test/scala/kamon/metric/ActorMetricsSpec.scala | 223
-rw-r--r--  kamon-core/src/test/scala/kamon/metric/DispatcherMetricsSpec.scala | 108
-rw-r--r--  kamon-core/src/test/scala/kamon/metric/RouterMetricsSpec.scala | 164
-rw-r--r--  kamon-core/src/test/scala/kamon/metric/SubscriptionsProtocolSpec.scala | 128
-rw-r--r--  kamon-core/src/test/scala/kamon/metric/TickMetricSnapshotBufferSpec.scala | 87
-rw-r--r--  kamon-core/src/test/scala/kamon/metric/TraceMetricsSpec.scala | 123
-rw-r--r--  kamon-core/src/test/scala/kamon/metric/UserMetricsSpec.scala | 312
-rw-r--r--  kamon-core/src/test/scala/kamon/metric/instrument/CounterSpec.scala | 17
-rw-r--r--  kamon-core/src/test/scala/kamon/metric/instrument/GaugeSpec.scala | 81
-rw-r--r--  kamon-core/src/test/scala/kamon/metric/instrument/HistogramSpec.scala | 34
-rw-r--r--  kamon-core/src/test/scala/kamon/metric/instrument/MinMaxCounterSpec.scala | 23
-rw-r--r--  kamon-core/src/test/scala/kamon/testkit/BaseKamonSpec.scala | 62
-rw-r--r--  kamon-core/src/test/scala/kamon/trace/SimpleTraceSpec.scala | 85
-rw-r--r--  kamon-core/src/test/scala/kamon/trace/TraceContextManipulationSpec.scala | 115
-rw-r--r--  kamon-core/src/test/scala/kamon/trace/TraceLocalSpec.scala | 48
-rw-r--r--  kamon-core/src/test/scala/kamon/util/GlobPathFilterSpec.scala | 73
-rw-r--r--  kamon-datadog/src/main/resources/reference.conf | 27
-rw-r--r--  kamon-datadog/src/main/scala/kamon/datadog/Datadog.scala | 65
-rw-r--r--  kamon-datadog/src/main/scala/kamon/datadog/DatadogMetricsSender.scala | 36
-rw-r--r--  kamon-datadog/src/test/scala/kamon/datadog/DatadogMetricSenderSpec.scala | 136
-rw-r--r--  kamon-jdbc/src/main/resources/META-INF/aop.xml | 12
-rw-r--r--  kamon-jdbc/src/main/resources/reference.conf | 26
-rw-r--r--  kamon-jdbc/src/main/scala/kamon/jdbc/Jdbc.scala | 82
-rw-r--r--  kamon-jdbc/src/main/scala/kamon/jdbc/instrumentation/StatementInstrumentation.scala | 122
-rw-r--r--  kamon-jdbc/src/main/scala/kamon/jdbc/metric/StatementsMetrics.scala | 32
-rw-r--r--  kamon-jdbc/src/test/resources/logback.xml | 12
-rw-r--r--  kamon-jdbc/src/test/scala/kamon/jdbc/instrumentation/StatementInstrumentationSpec.scala | 189
-rw-r--r--  kamon-log-reporter/src/main/resources/reference.conf | 12
-rw-r--r--  kamon-log-reporter/src/main/scala/kamon/logreporter/LogReporter.scala | 236
-rw-r--r--  kamon-newrelic/src/main/resources/reference.conf | 15
-rw-r--r--  kamon-newrelic/src/main/scala/kamon/newrelic/Agent.scala | 154
-rw-r--r--  kamon-newrelic/src/main/scala/kamon/newrelic/ApiMethodClient.scala | 68
-rw-r--r--  kamon-newrelic/src/main/scala/kamon/newrelic/ClientPipelines.scala | 23
-rw-r--r--  kamon-newrelic/src/main/scala/kamon/newrelic/CustomMetricExtractor.scala | 19
-rw-r--r--  kamon-newrelic/src/main/scala/kamon/newrelic/JsonProtocol.scala | 16
-rw-r--r--  kamon-newrelic/src/main/scala/kamon/newrelic/Metric.scala | 30
-rw-r--r--  kamon-newrelic/src/main/scala/kamon/newrelic/MetricReporter.scala | 134
-rw-r--r--  kamon-newrelic/src/main/scala/kamon/newrelic/NewRelicErrorLogger.scala | 20
-rw-r--r--  kamon-newrelic/src/main/scala/kamon/newrelic/WebTransactionMetricExtractor.scala | 72
-rw-r--r--  kamon-newrelic/src/test/scala/kamon/newrelic/AgentSpec.scala | 14
-rw-r--r--  kamon-newrelic/src/test/scala/kamon/newrelic/MetricReporterSpec.scala | 86
-rw-r--r--  kamon-play/src/main/resources/reference.conf | 9
-rw-r--r--  kamon-play/src/main/scala/kamon/play/Play.scala | 14
-rw-r--r--  kamon-play/src/main/scala/kamon/play/action/KamonTraceActions.scala | 4
-rw-r--r--  kamon-play/src/main/scala/kamon/play/instrumentation/LoggerLikeInstrumentation.scala | 43
-rw-r--r--  kamon-play/src/main/scala/kamon/play/instrumentation/RequestInstrumentation.scala | 66
-rw-r--r--  kamon-play/src/main/scala/kamon/play/instrumentation/WSInstrumentation.scala | 9
-rw-r--r--  kamon-play/src/test/resources/logback.xml | 12
-rw-r--r--  kamon-play/src/test/resources/logger.xml | 16
-rw-r--r--  kamon-play/src/test/scala/kamon/play/LoggerLikeInstrumentationSpec.scala | 26
-rw-r--r--  kamon-play/src/test/scala/kamon/play/RequestInstrumentationSpec.scala | 54
-rw-r--r--  kamon-play/src/test/scala/kamon/play/WSInstrumentationSpec.scala | 43
-rw-r--r--  kamon-play/src/test/scala/kamon/play/instrumentation/FakeRequestIntrumentation.scala | 1
-rw-r--r--  kamon-playground/src/main/resources/application.conf | 91
-rw-r--r--  kamon-playground/src/main/scala/test/SimpleRequestProcessor.scala | 30
-rw-r--r--  kamon-scala/src/main/resources/META-INF/aop.xml | 17
-rw-r--r--  kamon-scala/src/main/resources/reference.conf | 14
-rw-r--r--  kamon-scala/src/main/scala/kamon/scala/instrumentation/FutureInstrumentation.scala (renamed from kamon-core/src/main/scala/kamon/instrumentation/scala/FutureInstrumentation.scala) | 6
-rw-r--r--  kamon-scala/src/main/scala/kamon/scalaz/instrumentation/FutureInstrumentation.scala (renamed from kamon-core/src/main/scala/kamon/instrumentation/scalaz/FutureInstrumentation.scala) | 6
-rw-r--r--  kamon-scala/src/test/scala/kamon/scala/instrumentation/FutureInstrumentationSpec.scala (renamed from kamon-core/src/test/scala/kamon/instrumentation/scala/FutureInstrumentationSpec.scala) | 25
-rw-r--r--  kamon-scala/src/test/scala/kamon/scalaz/instrumentation/FutureInstrumentationSpec.scala (renamed from kamon-core/src/test/scala/kamon/instrumentation/scalaz/FutureInstrumentationSpec.scala) | 29
-rw-r--r--  kamon-spray/src/main/resources/META-INF/aop.xml | 6
-rw-r--r--  kamon-spray/src/main/resources/reference.conf | 15
-rw-r--r--  kamon-spray/src/main/scala/kamon/spray/KamonTraceDirectives.scala | 4
-rw-r--r--  kamon-spray/src/main/scala/kamon/spray/SprayExtension.scala (renamed from kamon-spray/src/main/scala/kamon/spray/Spray.scala) | 61
-rw-r--r--  kamon-spray/src/main/scala/kamon/spray/SprayExtensionSettings.scala | 35
-rw-r--r--  kamon-spray/src/main/scala/kamon/spray/instrumentation/ClientRequestInstrumentation.scala (renamed from kamon-spray/src/main/scala/spray/can/client/ClientRequestInstrumentation.scala) | 30
-rw-r--r--  kamon-spray/src/main/scala/kamon/spray/instrumentation/ServerRequestInstrumentation.scala (renamed from kamon-spray/src/main/scala/spray/can/server/ServerRequestInstrumentation.scala) | 64
-rw-r--r--  kamon-spray/src/test/resources/application.conf | 25
-rw-r--r--  kamon-spray/src/test/scala/kamon/spray/ClientRequestInstrumentationSpec.scala | 147
-rw-r--r--  kamon-spray/src/test/scala/kamon/spray/SprayServerMetricsSpec.scala | 67
-rw-r--r--  kamon-spray/src/test/scala/kamon/spray/SprayServerTracingSpec.scala | 55
-rw-r--r--  kamon-statsd/src/main/resources/reference.conf | 21
-rw-r--r--  kamon-statsd/src/main/scala/kamon/statsd/SimpleMetricKeyGenerator.scala | 16
-rw-r--r--  kamon-statsd/src/main/scala/kamon/statsd/StatsD.scala | 56
-rw-r--r--  kamon-statsd/src/main/scala/kamon/statsd/StatsDMetricsSender.scala | 8
-rw-r--r--  kamon-statsd/src/test/scala/kamon/statsd/SimpleMetricKeyGeneratorSpec.scala | 14
-rw-r--r--  kamon-statsd/src/test/scala/kamon/statsd/StatsDMetricSenderSpec.scala | 162
-rw-r--r--  kamon-system-metrics/src/main/resources/kamon/system/sigar/index | 21
-rw-r--r--  kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-amd64-freebsd-6.so | bin 210641 -> 0 bytes
-rw-r--r--  kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-amd64-linux.so | bin 246605 -> 0 bytes
-rw-r--r--  kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-amd64-solaris.so | bin 251360 -> 0 bytes
-rw-r--r--  kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ia64-hpux-11.sl | bin 577452 -> 0 bytes
-rw-r--r--  kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ia64-linux.so | bin 494929 -> 0 bytes
-rw-r--r--  kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-pa-hpux-11.sl | bin 516096 -> 0 bytes
-rw-r--r--  kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ppc-aix-5.so | bin 400925 -> 0 bytes
-rw-r--r--  kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ppc-linux.so | bin 258547 -> 0 bytes
-rw-r--r--  kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ppc64-aix-5.so | bin 425077 -> 0 bytes
-rw-r--r--  kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ppc64-linux.so | bin 330767 -> 0 bytes
-rw-r--r--  kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-s390x-linux.so | bin 269932 -> 0 bytes
-rw-r--r--  kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-sparc-solaris.so | bin 285004 -> 0 bytes
-rw-r--r--  kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-sparc64-solaris.so | bin 261896 -> 0 bytes
-rw-r--r--  kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-universal-macosx.dylib | bin 377668 -> 0 bytes
-rw-r--r--  kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-universal64-macosx.dylib | bin 397440 -> 0 bytes
-rw-r--r--  kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-x86-freebsd-5.so | bin 179751 -> 0 bytes
-rw-r--r--  kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-x86-freebsd-6.so | bin 179379 -> 0 bytes
-rw-r--r--  kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-x86-linux.so | bin 233385 -> 0 bytes
-rw-r--r--  kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-x86-solaris.so | bin 242880 -> 0 bytes
-rw-r--r--  kamon-system-metrics/src/main/resources/kamon/system/sigar/sigar-amd64-winnt.dll | bin 402432 -> 0 bytes
-rw-r--r--  kamon-system-metrics/src/main/resources/kamon/system/sigar/sigar-x86-winnt.dll | bin 266240 -> 0 bytes
-rw-r--r--  kamon-system-metrics/src/main/resources/reference.conf | 243
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/metrics/CPUMetrics.scala | 84
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/metrics/ContextSwitchesMetrics.scala | 81
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/metrics/GCMetrics.scala | 78
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/metrics/HeapMetrics.scala | 87
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/metrics/MemoryMetrics.scala | 92
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/metrics/NetworkMetrics.scala | 83
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/metrics/ProcessCPUMetrics.scala | 76
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/SystemMetrics.scala | 64
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsCollector.scala | 176
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsExtension.scala | 71
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/custom/ContextSwitchesMetrics.scala | 118
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/jmx/ClassLoadingMetrics.scala | 48
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/jmx/GarbageCollectionMetrics.scala | 54
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/jmx/HeapMemoryMetrics.scala | 49
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/jmx/JmxSystemMetricRecorderCompanion.scala (renamed from kamon-core/src/main/scala/kamon/metric/package.scala) | 27
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/jmx/NonHeapMemoryMetrics.scala | 53
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/jmx/ThreadsMetrics.scala | 48
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/sigar/CpuMetrics.scala | 53
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/sigar/DiffRecordingHistogram.scala | 60
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/sigar/FileSystemMetrics.scala | 48
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/sigar/LoadAverageMetrics.scala | 45
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/sigar/MemoryMetrics.scala | 57
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/sigar/NetworkMetrics.scala | 61
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/sigar/ProcessCpuMetrics.scala | 77
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/sigar/SigarLoader.scala | 203
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/sigar/SigarMetricsUpdater.scala | 75
-rw-r--r--  kamon-system-metrics/src/test/scala/kamon/metrics/RedirectLogging.scala | 34
-rw-r--r--  kamon-system-metrics/src/test/scala/kamon/metrics/SystemMetricsSpec.scala | 416
-rw-r--r--  kamon-testkit/src/main/scala/testkit/AkkaExtensionSwap.scala (renamed from kamon-core/src/main/scala/kamon/AkkaExtensionSwap.scala) | 5
-rw-r--r--  kamon-testkit/src/main/scala/testkit/TestProbeInstrumentation.scala | 4
-rw-r--r--  project/AspectJ.scala | 15
-rw-r--r--  project/Dependencies.scala | 29
-rw-r--r--  project/Projects.scala | 91
-rw-r--r--  project/Publish.scala | 15
-rw-r--r--  project/Release.scala | 15
-rw-r--r--  project/Settings.scala | 50
-rw-r--r--  project/VersionWithSHA.scala | 15
-rw-r--r--  project/build.properties | 2
-rw-r--r--  version.sbt | 2
228 files changed, 8567 insertions, 6086 deletions
diff --git a/.gitignore b/.gitignore
index 9a29a91d..c91919e0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -28,6 +28,7 @@ project/plugins/project/
.settings
.classpath
.cache
+bin/
_site
@@ -47,3 +48,12 @@ test-result
server.pid
*.iml
*.eml
+
+# Default sigar library provision location.
+native/
+
+# Ignore all the tooling and processed assets from kamon-dashboard's angular side
+kamon-dashboard/src/main/angular/vendor/*
+!kamon-dashboard/src/main/angular/vendor/readme.md
+kamon-dashboard/src/main/angular/tools/dist
+kamon-dashboard/src/main/angular/tools/node_modules
diff --git a/kamon-akka-remote/src/main/resources/META-INF/aop.xml b/kamon-akka-remote/src/main/resources/META-INF/aop.xml
index ba1c8e79..e84a6094 100644
--- a/kamon-akka-remote/src/main/resources/META-INF/aop.xml
+++ b/kamon-akka-remote/src/main/resources/META-INF/aop.xml
@@ -3,7 +3,7 @@
<aspectj>
<aspects>
<!-- Remoting and Cluster -->
- <aspect name="akka.remote.instrumentation.RemotingInstrumentation"/>
+ <aspect name="akka.kamon.instrumentation.RemotingInstrumentation"/>
</aspects>
<weaver>
diff --git a/kamon-akka-remote/src/main/resources/reference.conf b/kamon-akka-remote/src/main/resources/reference.conf
new file mode 100644
index 00000000..7c6be896
--- /dev/null
+++ b/kamon-akka-remote/src/main/resources/reference.conf
@@ -0,0 +1,14 @@
+# ========================================= #
+# Kamon-Akka-Remote Reference Configuration #
+# ========================================= #
+
+kamon {
+
+ modules {
+ kamon-akka-remote {
+ auto-start = yes
+ requires-aspectj = yes
+ extension-id = none
+ }
+ }
+} \ No newline at end of file
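
The `kamon.modules` block above is what the new module supervisor (see `ModuleSupervisorSettings.scala` in the diffstat) reads to decide whether a module starts automatically and whether it needs the AspectJ weaver. A minimal sketch of overriding one of these flags through Typesafe Config; the `auto-start = no` override is purely illustrative, not something this commit does:

    import com.typesafe.config.ConfigFactory

    // Hypothetical override: keep the remote instrumentation module from auto-starting.
    val config = ConfigFactory.parseString(
      "kamon.modules.kamon-akka-remote.auto-start = no")
      .withFallback(ConfigFactory.load())

    // Typesafe Config treats yes/no as booleans, matching the reference file above.
    assert(!config.getBoolean("kamon.modules.kamon-akka-remote.auto-start"))
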
diff --git a/kamon-akka-remote/src/main/scala/kamon/instrumentation/akka/RemotingInstrumentation.scala b/kamon-akka-remote/src/main/scala/kamon/akka/instrumentation/RemotingInstrumentation.scala
index 560008cf..2e4b8fc3 100644
--- a/kamon-akka-remote/src/main/scala/kamon/instrumentation/akka/RemotingInstrumentation.scala
+++ b/kamon-akka-remote/src/main/scala/kamon/akka/instrumentation/RemotingInstrumentation.scala
@@ -1,11 +1,13 @@
-package akka.remote.instrumentation
+package akka.kamon.instrumentation
import akka.actor.{ ActorRef, Address }
import akka.remote.instrumentation.TraceContextAwareWireFormats.{ TraceContextAwareRemoteEnvelope, RemoteTraceContext, AckAndTraceContextAwareEnvelopeContainer }
import akka.remote.{ RemoteActorRefProvider, Ack, SeqNo }
import akka.remote.WireFormats._
import akka.util.ByteString
-import kamon.trace.TraceRecorder
+import kamon.Kamon
+import kamon.trace.TraceContext
+import kamon.util.MilliTimestamp
import org.aspectj.lang.ProceedingJoinPoint
import org.aspectj.lang.annotation._
@@ -31,15 +33,13 @@ class RemotingInstrumentation {
envelopeBuilder.setMessage(serializedMessage)
// Attach the TraceContext info, if available.
- if (!TraceRecorder.currentContext.isEmpty) {
- val context = TraceRecorder.currentContext
- val relativeStartMilliTime = System.currentTimeMillis - ((System.nanoTime - context.nanoTimestamp) / 1000000)
+ TraceContext.map { context ⇒
envelopeBuilder.setTraceContext(RemoteTraceContext.newBuilder()
.setTraceName(context.name)
.setTraceToken(context.token)
.setIsOpen(context.isOpen)
- .setStartMilliTime(relativeStartMilliTime)
+ .setStartMilliTime(context.startTimestamp.toMilliTimestamp.millis)
.build())
}
@@ -83,15 +83,16 @@ class RemotingInstrumentation {
if (ackAndEnvelope.hasEnvelope && ackAndEnvelope.getEnvelope.hasTraceContext) {
val remoteTraceContext = ackAndEnvelope.getEnvelope.getTraceContext
- val system = provider.guardian.underlying.system
- val ctx = TraceRecorder.joinRemoteTraceContext(
- remoteTraceContext.getTraceName(),
- remoteTraceContext.getTraceToken(),
- remoteTraceContext.getStartMilliTime(),
- remoteTraceContext.getIsOpen(),
- system)
-
- TraceRecorder.setContext(ctx)
+ val tracer = Kamon.tracer
+
+ val ctx = tracer.newContext(
+ remoteTraceContext.getTraceName,
+ remoteTraceContext.getTraceToken,
+ new MilliTimestamp(remoteTraceContext.getStartMilliTime()).toRelativeNanoTimestamp,
+ remoteTraceContext.getIsOpen,
+ isLocal = false)
+
+ TraceContext.setCurrentContext(ctx)
}
pjp.proceed()
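
The hunks above capture the API migration this commit performs: reading the current context moves from `TraceRecorder.currentContext` to the helpers on the `TraceContext` companion, and contexts are created through `Kamon.tracer` instead of `TraceRecorder.withNewTraceContext`. A sketch of the new calling pattern, using only calls visible in this diff (`newContext`, `withContext`, `map`); the trace name and body are illustrative:

    import kamon.Kamon
    import kamon.trace.TraceContext

    // Create a context and run a block inside it, as the specs below do.
    TraceContext.withContext(Kamon.tracer.newContext("example-trace", "example-trace-1")) {
      // TraceContext.map only runs its function when a context is present,
      // replacing the old `if (!TraceRecorder.currentContext.isEmpty)` guard.
      TraceContext.map(ctx ⇒ println(s"token=${ctx.token}"))
    }
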
diff --git a/kamon-akka-remote/src/test/resources/logback.xml b/kamon-akka-remote/src/test/resources/logback.xml
new file mode 100644
index 00000000..dd623d61
--- /dev/null
+++ b/kamon-akka-remote/src/test/resources/logback.xml
@@ -0,0 +1,17 @@
+<configuration scan="true">
+ <contextListener class="ch.qos.logback.classic.jul.LevelChangePropagator">
+ <resetJUL>true</resetJUL>
+ </contextListener>
+
+ <conversionRule conversionWord="traceToken" converterClass="kamon.trace.logging.LogbackTraceTokenConverter"/>
+
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <encoder>
+ <pattern>%date{HH:mm:ss.SSS} %-5level [%traceToken][%thread] %logger{55} - %msg%n</pattern>
+ </encoder>
+ </appender>
+
+ <root level="error">
+ <appender-ref ref="STDOUT"/>
+ </root>
+</configuration>
diff --git a/kamon-akka-remote/src/test/scala/kamon/instrumentation/akka/RemotingInstrumentationSpec.scala b/kamon-akka-remote/src/test/scala/kamon/akka/instrumentation/RemotingInstrumentationSpec.scala
index 63cc9832..e0aa90ac 100644
--- a/kamon-akka-remote/src/test/scala/kamon/instrumentation/akka/RemotingInstrumentationSpec.scala
+++ b/kamon-akka-remote/src/test/scala/kamon/akka/instrumentation/RemotingInstrumentationSpec.scala
@@ -8,7 +8,8 @@ import akka.routing.RoundRobinRouter
import akka.testkit.{ ImplicitSender, TestKitBase }
import akka.util.Timeout
import com.typesafe.config.ConfigFactory
-import kamon.trace.TraceRecorder
+import kamon.Kamon
+import kamon.trace.TraceContext
import org.scalatest.{ Matchers, WordSpecLike }
import scala.concurrent.duration._
@@ -17,25 +18,32 @@ import scala.util.control.NonFatal
class RemotingInstrumentationSpec extends TestKitBase with WordSpecLike with Matchers {
implicit def self = testActor
- implicit lazy val system: ActorSystem = ActorSystem("remoting-spec-local-system", ConfigFactory.parseString(
- """
- |akka {
- | actor {
- | provider = "akka.remote.RemoteActorRefProvider"
- | }
- | remote {
- | enabled-transports = ["akka.remote.netty.tcp"]
- | netty.tcp {
- | hostname = "127.0.0.1"
- | port = 2552
- | }
- | }
- |}
- """.stripMargin))
+ implicit lazy val system: ActorSystem = {
+ Kamon.start()
+ ActorSystem("remoting-spec-local-system", ConfigFactory.parseString(
+ """
+ |akka {
+ | loggers = ["akka.event.slf4j.Slf4jLogger"]
+ |
+ | actor {
+ | provider = "akka.remote.RemoteActorRefProvider"
+ | }
+ | remote {
+ | enabled-transports = ["akka.remote.netty.tcp"]
+ | netty.tcp {
+ | hostname = "127.0.0.1"
+ | port = 2552
+ | }
+ | }
+ |}
+ """.stripMargin))
+ }
val remoteSystem: ActorSystem = ActorSystem("remoting-spec-remote-system", ConfigFactory.parseString(
"""
|akka {
+ | loggers = ["akka.event.slf4j.Slf4jLogger"]
+ |
| actor {
| provider = "akka.remote.RemoteActorRefProvider"
| }
@@ -50,10 +58,11 @@ class RemotingInstrumentationSpec extends TestKitBase with WordSpecLike with Mat
""".stripMargin))
val RemoteSystemAddress = AddressFromURIString("akka.tcp://remoting-spec-remote-system@127.0.0.1:2553")
+ import Kamon.tracer
"The Remoting instrumentation" should {
"propagate the TraceContext when creating a new remote actor" in {
- TraceRecorder.withNewTraceContext("deploy-remote-actor", Some("deploy-remote-actor-1")) {
+ TraceContext.withContext(tracer.newContext("deploy-remote-actor", "deploy-remote-actor-1")) {
system.actorOf(TraceTokenReplier.remoteProps(Some(testActor), RemoteSystemAddress), "remote-deploy-fixture")
}
@@ -63,7 +72,7 @@ class RemotingInstrumentationSpec extends TestKitBase with WordSpecLike with Mat
"propagate the TraceContext when sending a message to a remotely deployed actor" in {
val remoteRef = system.actorOf(TraceTokenReplier.remoteProps(None, RemoteSystemAddress), "remote-message-fixture")
- TraceRecorder.withNewTraceContext("message-remote-actor", Some("message-remote-actor-1")) {
+ TraceContext.withContext(tracer.newContext("message-remote-actor", "message-remote-actor-1")) {
remoteRef ! "reply-trace-token"
}
@@ -75,7 +84,7 @@ class RemotingInstrumentationSpec extends TestKitBase with WordSpecLike with Mat
implicit val askTimeout = Timeout(10 seconds)
val remoteRef = system.actorOf(TraceTokenReplier.remoteProps(None, RemoteSystemAddress), "remote-ask-and-pipe-fixture")
- TraceRecorder.withNewTraceContext("ask-and-pipe-remote-actor", Some("ask-and-pipe-remote-actor-1")) {
+ TraceContext.withContext(tracer.newContext("ask-and-pipe-remote-actor", "ask-and-pipe-remote-actor-1")) {
(remoteRef ? "reply-trace-token") pipeTo (testActor)
}
@@ -87,7 +96,7 @@ class RemotingInstrumentationSpec extends TestKitBase with WordSpecLike with Mat
remoteSystem.actorOf(TraceTokenReplier.props(None), "actor-selection-target-b")
val selection = system.actorSelection(RemoteSystemAddress + "/user/actor-selection-target-*")
- TraceRecorder.withNewTraceContext("message-remote-actor-selection", Some("message-remote-actor-selection-1")) {
+ TraceContext.withContext(tracer.newContext("message-remote-actor-selection", "message-remote-actor-selection-1")) {
selection ! "reply-trace-token"
}
@@ -96,10 +105,10 @@ class RemotingInstrumentationSpec extends TestKitBase with WordSpecLike with Mat
expectMsg("name=message-remote-actor-selection|token=message-remote-actor-selection-1|isOpen=true")
}
- "propagate the TraceContext a remotely supervised child fails" in {
+ "propagate the TraceContext if a remotely supervised child fails" in {
val supervisor = system.actorOf(Props(new SupervisorOfRemote(testActor, RemoteSystemAddress)))
- TraceRecorder.withNewTraceContext("remote-supervision", Some("remote-supervision-1")) {
+ TraceContext.withContext(tracer.newContext("remote-supervision", "remote-supervision-1")) {
supervisor ! "fail"
}
@@ -111,7 +120,7 @@ class RemotingInstrumentationSpec extends TestKitBase with WordSpecLike with Mat
val routees = Vector[String](RemoteSystemAddress + "/user/remote-routee")
val router = system.actorOf(Props.empty.withRouter(RoundRobinRouter(routees = routees)))
- TraceRecorder.withNewTraceContext("remote-routee", Some("remote-routee-1")) {
+ TraceContext.withContext(tracer.newContext("remote-routee", "remote-routee-1")) {
router ! "reply-trace-token"
}
@@ -130,12 +139,11 @@ class TraceTokenReplier(creationTraceContextListener: Option[ActorRef]) extends
case "fail" ⇒
throw new ArithmeticException("Division by zero.")
case "reply-trace-token" ⇒
- log.info("Sending back the TT: " + TraceRecorder.currentContext.token)
sender ! currentTraceContextInfo
}
def currentTraceContextInfo: String = {
- val ctx = TraceRecorder.currentContext
+ val ctx = TraceContext.currentContext
s"name=${ctx.name}|token=${ctx.token}|isOpen=${ctx.isOpen}"
}
}
@@ -164,7 +172,7 @@ class SupervisorOfRemote(traceContextListener: ActorRef, remoteAddress: Address)
}
def currentTraceContextInfo: String = {
- val ctx = TraceRecorder.currentContext
+ val ctx = TraceContext.currentContext
s"name=${ctx.name}|token=${ctx.token}|isOpen=${ctx.isOpen}"
}
}
diff --git a/kamon-akka/src/main/resources/META-INF/aop.xml b/kamon-akka/src/main/resources/META-INF/aop.xml
new file mode 100644
index 00000000..46e63f91
--- /dev/null
+++ b/kamon-akka/src/main/resources/META-INF/aop.xml
@@ -0,0 +1,34 @@
+<!DOCTYPE aspectj PUBLIC "-//AspectJ//DTD//EN" "http://www.eclipse.org/aspectj/dtd/aspectj.dtd">
+
+<aspectj>
+ <aspects>
+
+ <!-- Actors -->
+ <aspect name="akka.kamon.instrumentation.TraceContextIntoRepointableActorRefMixin"/>
+ <aspect name="akka.kamon.instrumentation.TraceContextIntoSystemMessageMixin"/>
+ <aspect name="akka.kamon.instrumentation.ActorSystemMessageInstrumentation"/>
+ <aspect name="akka.kamon.instrumentation.TraceContextIntoEnvelopeMixin"/>
+ <aspect name="akka.kamon.instrumentation.MetricsIntoActorCellsMixin"/>
+ <aspect name="akka.kamon.instrumentation.ActorCellInstrumentation"/>
+ <aspect name="akka.kamon.instrumentation.RoutedActorCellInstrumentation"/>
+ <aspect name="akka.kamon.instrumentation.ActorLoggingInstrumentation"/>
+
+ <!-- Dispatchers -->
+ <aspect name="akka.kamon.instrumentation.DispatcherInstrumentation"/>
+ <aspect name="akka.kamon.instrumentation.DispatcherMetricCollectionInfoIntoDispatcherMixin"/>
+
+ <!-- Patterns -->
+ <aspect name="akka.kamon.instrumentation.AskPatternInstrumentation"/>
+ </aspects>
+
+ <weaver>
+ <include within="akka..*"/>
+
+ <!-- For some weird reason ByteString produces a java.lang.VerifyError after going through the weaver. -->
+ <exclude within="akka.util.ByteString"/>
+
+ <!-- Exclude CallingThreadDispatcher, is only for test purposes -->
+ <exclude within="akka.testkit.CallingThreadDispatcher"/>
+ </weaver>
+
+</aspectj> \ No newline at end of file
diff --git a/kamon-akka/src/main/resources/reference.conf b/kamon-akka/src/main/resources/reference.conf
new file mode 100644
index 00000000..cc2b6060
--- /dev/null
+++ b/kamon-akka/src/main/resources/reference.conf
@@ -0,0 +1,30 @@
+# ================================== #
+# Kamon-Akka Reference Configuration #
+# ================================== #
+
+kamon {
+ akka {
+ # If ask-pattern-timeout-warning is enabled, a WARN level log message will be generated if a future created by the `ask`
+ # pattern fails with an `AskTimeoutException`; the log message content depends on the selected strategy.
+ # strategies:
+ # - off: nothing to do.
+ # - lightweight: logs the warning when a timeout is reached using org.aspectj.lang.reflect.SourceLocation.
+ # - heavyweight: logs the warning when a timeout is reached using a stack trace captured at the moment the future was created.
+ ask-pattern-timeout-warning = off
+
+ }
+
+ metric.filters {
+ actor.includes = []
+ actor.excludes = [ "", "user", "system**", "user/IO-**" ]
+ }
+
+
+ modules {
+ kamon-akka {
+ auto-start = yes
+ requires-aspectj = yes
+ extension-id = "kamon.akka.Akka"
+ }
+ }
+} \ No newline at end of file
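
As a usage note for the setting documented above: the strategy is read once per ActorSystem by `AkkaExtension` (via `config.getString("ask-pattern-timeout-warning")`, shown later in this diff), so it can be switched through regular configuration. A sketch using the same `ConfigFactory.parseString` pattern the specs in this diff rely on; the system name is arbitrary:

    import akka.actor.ActorSystem
    import com.typesafe.config.ConfigFactory

    // Enable the lightweight strategy: timed-out asks publish a Warning with the
    // caller's source location to the event stream.
    val system = ActorSystem("ask-timeout-demo", ConfigFactory.parseString(
      "kamon.akka.ask-pattern-timeout-warning = lightweight")
      .withFallback(ConfigFactory.load()))
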
diff --git a/kamon-akka/src/main/scala/kamon/akka/ActorMetrics.scala b/kamon-akka/src/main/scala/kamon/akka/ActorMetrics.scala
new file mode 100644
index 00000000..c99df586
--- /dev/null
+++ b/kamon-akka/src/main/scala/kamon/akka/ActorMetrics.scala
@@ -0,0 +1,41 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.akka
+
+import kamon.metric.{ EntityRecorderFactory, GenericEntityRecorder }
+import kamon.metric.instrument.{ Time, InstrumentFactory }
+
+/**
+ * Entity recorder for Akka Actors. The metrics being tracked are:
+ *
+ * - time-in-mailbox: Time spent from the instant when a message is enqueued in an actor's mailbox to the instant when
+ * that message is dequeued for processing.
+ * - processing-time: Time taken for the actor to process the receive function.
+ * - mailbox-size: Size of the actor's mailbox.
+ * - errors: Number of errors seen by the actor's supervision mechanism.
+ */
+class ActorMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val timeInMailbox = histogram("time-in-mailbox", Time.Nanoseconds)
+ val processingTime = histogram("processing-time", Time.Nanoseconds)
+ val mailboxSize = minMaxCounter("mailbox-size")
+ val errors = counter("errors")
+}
+
+object ActorMetrics extends EntityRecorderFactory[ActorMetrics] {
+ def category: String = "akka-actor"
+ def createRecorder(instrumentFactory: InstrumentFactory): ActorMetrics = new ActorMetrics(instrumentFactory)
+} \ No newline at end of file
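
`ActorMetrics` is normally registered by the instrumentation further down in this diff (`ActorCellInstrumentation.scala`), but the same `Kamon.metrics.register` call can be exercised directly. A sketch based on how the instrumentation uses the returned registration (`registration.entity` / `registration.recorder`); the entity name mirrors the `ref.path.elements.mkString("/")` format and the recorded values are illustrative:

    import kamon.Kamon
    import kamon.akka.ActorMetrics

    Kamon.metrics.register(ActorMetrics, "user/example-actor").map { registration ⇒
      // Record a 1ms processing time; the histograms above use Time.Nanoseconds.
      registration.recorder.processingTime.record(1000000L)
      registration.recorder.mailboxSize.increment()
    }
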
diff --git a/kamon-akka/src/main/scala/kamon/akka/AkkaExtension.scala b/kamon-akka/src/main/scala/kamon/akka/AkkaExtension.scala
new file mode 100644
index 00000000..5b3d19d4
--- /dev/null
+++ b/kamon-akka/src/main/scala/kamon/akka/AkkaExtension.scala
@@ -0,0 +1,34 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.akka
+
+import _root_.akka.actor
+import _root_.akka.actor._
+import kamon._
+
+class AkkaExtension(system: ExtendedActorSystem) extends Kamon.Extension {
+ val config = system.settings.config.getConfig("kamon.akka")
+ val askPatternTimeoutWarning = config.getString("ask-pattern-timeout-warning")
+ val dispatcher = system.dispatcher
+}
+
+object Akka extends ExtensionId[AkkaExtension] with ExtensionIdProvider {
+ def lookup(): ExtensionId[_ <: actor.Extension] = Akka
+ def createExtension(system: ExtendedActorSystem): AkkaExtension = new AkkaExtension(system)
+
+}
+
diff --git a/kamon-akka/src/main/scala/kamon/akka/DispatcherMetrics.scala b/kamon-akka/src/main/scala/kamon/akka/DispatcherMetrics.scala
new file mode 100644
index 00000000..acf92e70
--- /dev/null
+++ b/kamon-akka/src/main/scala/kamon/akka/DispatcherMetrics.scala
@@ -0,0 +1,86 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.akka
+
+import java.util.concurrent.ThreadPoolExecutor
+
+import _root_.akka.dispatch.ForkJoinExecutorConfigurator.AkkaForkJoinPool
+import kamon.metric._
+import kamon.metric.instrument.{ DifferentialValueCollector, InstrumentFactory }
+
+class ForkJoinPoolDispatcherMetrics(fjp: AkkaForkJoinPool, instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val parallelism = minMaxCounter("parallelism")
+ parallelism.increment(fjp.getParallelism) // Steady value.
+
+ val poolSize = gauge("pool-size", () ⇒ {
+ fjp.getPoolSize.toLong
+ })
+
+ val activeThreads = gauge("active-threads", () ⇒ {
+ fjp.getActiveThreadCount.toLong
+ })
+
+ val runningThreads = gauge("running-threads", () ⇒ {
+ fjp.getRunningThreadCount.toLong
+ })
+
+ val queuedTaskCount = gauge("queued-task-count", () ⇒ {
+ fjp.getQueuedTaskCount
+ })
+}
+
+object ForkJoinPoolDispatcherMetrics {
+
+ def factory(fjp: AkkaForkJoinPool) = new EntityRecorderFactory[ForkJoinPoolDispatcherMetrics] {
+ def category: String = AkkaDispatcherMetrics.Category
+ def createRecorder(instrumentFactory: InstrumentFactory) = new ForkJoinPoolDispatcherMetrics(fjp, instrumentFactory)
+ }
+}
+
+class ThreadPoolExecutorDispatcherMetrics(tpe: ThreadPoolExecutor, instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val corePoolSize = gauge("core-pool-size", () ⇒ {
+ tpe.getCorePoolSize.toLong
+ })
+
+ val maxPoolSize = gauge("max-pool-size", () ⇒ {
+ tpe.getMaximumPoolSize.toLong
+ })
+
+ val poolSize = gauge("pool-size", () ⇒ {
+ tpe.getPoolSize.toLong
+ })
+
+ val activeThreads = gauge("active-threads", () ⇒ {
+ tpe.getActiveCount.toLong
+ })
+
+ val processedTasks = gauge("processed-tasks", DifferentialValueCollector(() ⇒ {
+ tpe.getTaskCount
+ }))
+}
+
+object ThreadPoolExecutorDispatcherMetrics {
+
+ def factory(tpe: ThreadPoolExecutor) = new EntityRecorderFactory[ThreadPoolExecutorDispatcherMetrics] {
+ def category: String = AkkaDispatcherMetrics.Category
+ def createRecorder(instrumentFactory: InstrumentFactory) = new ThreadPoolExecutorDispatcherMetrics(tpe, instrumentFactory)
+ }
+}
+
+object AkkaDispatcherMetrics {
+ val Category = "akka-dispatcher"
+}
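
Unlike `ActorMetrics`, these dispatcher recorders need the executor instance at construction time, which is what the `factory` helpers provide. A sketch of registering one, assuming `Kamon.metrics.register` accepts any `EntityRecorderFactory` as the actor and router registrations in this diff suggest; the pool and dispatcher name are illustrative:

    import java.util.concurrent.{ Executors, ThreadPoolExecutor }
    import kamon.Kamon
    import kamon.akka.ThreadPoolExecutorDispatcherMetrics

    val pool = Executors.newFixedThreadPool(4).asInstanceOf[ThreadPoolExecutor]
    // The gauges defined above sample the pool on Kamon's refresh schedule.
    Kamon.metrics.register(ThreadPoolExecutorDispatcherMetrics.factory(pool), "example-dispatcher")
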
diff --git a/kamon-akka/src/main/scala/kamon/akka/RouterMetrics.scala b/kamon-akka/src/main/scala/kamon/akka/RouterMetrics.scala
new file mode 100644
index 00000000..5c5bb05a
--- /dev/null
+++ b/kamon-akka/src/main/scala/kamon/akka/RouterMetrics.scala
@@ -0,0 +1,40 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+package kamon.akka
+
+import kamon.metric._
+import kamon.metric.instrument.{ Time, InstrumentFactory }
+
+/**
+ * Entity recorder for Akka Routers. The metrics being tracked are:
+ *
+ * - routing-time: Time taken for the router to process the routing logic.
+ * - time-in-mailbox: Time spent from the instant when a message is enqueued in an actor's mailbox to the instant when
+ * that message is dequeued for processing.
+ * - processing-time: Time taken for the actor to process the receive function.
+ * - errors: Number of errors seen by the actor's supervision mechanism.
+ */
+class RouterMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val routingTime = histogram("routing-time", Time.Nanoseconds)
+ val timeInMailbox = histogram("time-in-mailbox", Time.Nanoseconds)
+ val processingTime = histogram("processing-time", Time.Nanoseconds)
+ val errors = counter("errors")
+}
+
+object RouterMetrics extends EntityRecorderFactory[RouterMetrics] {
+ def category: String = "akka-router"
+ def createRecorder(instrumentFactory: InstrumentFactory): RouterMetrics = new RouterMetrics(instrumentFactory)
+} \ No newline at end of file
diff --git a/kamon-akka/src/main/scala/kamon/akka/instrumentation/ActorCellInstrumentation.scala b/kamon-akka/src/main/scala/kamon/akka/instrumentation/ActorCellInstrumentation.scala
new file mode 100644
index 00000000..7c722569
--- /dev/null
+++ b/kamon-akka/src/main/scala/kamon/akka/instrumentation/ActorCellInstrumentation.scala
@@ -0,0 +1,212 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package akka.kamon.instrumentation
+
+import akka.actor._
+import akka.dispatch.{ Envelope, MessageDispatcher }
+import akka.routing.RoutedActorCell
+import kamon.Kamon
+import kamon.akka.{ RouterMetrics, ActorMetrics }
+import kamon.metric.Entity
+import kamon.trace._
+import org.aspectj.lang.ProceedingJoinPoint
+import org.aspectj.lang.annotation._
+
+@Aspect
+class ActorCellInstrumentation {
+
+ @Pointcut("execution(akka.actor.ActorCell.new(..)) && this(cell) && args(system, ref, props, dispatcher, parent)")
+ def actorCellCreation(cell: ActorCell, system: ActorSystem, ref: ActorRef, props: Props, dispatcher: MessageDispatcher, parent: ActorRef): Unit = {}
+
+ @After("actorCellCreation(cell, system, ref, props, dispatcher, parent)")
+ def afterCreation(cell: ActorCell, system: ActorSystem, ref: ActorRef, props: Props, dispatcher: MessageDispatcher, parent: ActorRef): Unit = {
+ Kamon.metrics.register(ActorMetrics, ref.path.elements.mkString("/")).map { registration ⇒
+ val cellMetrics = cell.asInstanceOf[ActorCellMetrics]
+
+ cellMetrics.entity = registration.entity
+ cellMetrics.recorder = Some(registration.recorder)
+ }
+
+ }
+
+ @Pointcut("execution(* akka.actor.ActorCell.invoke(*)) && this(cell) && args(envelope)")
+ def invokingActorBehaviourAtActorCell(cell: ActorCell, envelope: Envelope) = {}
+
+ @Around("invokingActorBehaviourAtActorCell(cell, envelope)")
+ def aroundBehaviourInvoke(pjp: ProceedingJoinPoint, cell: ActorCell, envelope: Envelope): Any = {
+ val cellMetrics = cell.asInstanceOf[ActorCellMetrics]
+ val timestampBeforeProcessing = System.nanoTime()
+ val contextAndTimestamp = envelope.asInstanceOf[TimestampedTraceContextAware]
+
+ try {
+ TraceContext.withContext(contextAndTimestamp.traceContext) {
+ pjp.proceed()
+ }
+ } finally {
+ cellMetrics.recorder.map { am ⇒
+ val processingTime = System.nanoTime() - timestampBeforeProcessing
+ val timeInMailbox = timestampBeforeProcessing - contextAndTimestamp.captureNanoTime
+
+ am.processingTime.record(processingTime)
+ am.timeInMailbox.record(timeInMailbox)
+ am.mailboxSize.decrement()
+
+ // If this actor is behind a router, also record the metrics for the router.
+ envelope.asInstanceOf[RouterAwareEnvelope].routerMetricsRecorder.map { rm ⇒
+ rm.processingTime.record(processingTime)
+ rm.timeInMailbox.record(timeInMailbox)
+ }
+ }
+ }
+ }
+
+ @Pointcut("execution(* akka.actor.ActorCell.sendMessage(*)) && this(cell) && args(envelope)")
+ def sendMessageInActorCell(cell: ActorCell, envelope: Envelope): Unit = {}
+
+ @After("sendMessageInActorCell(cell, envelope)")
+ def afterSendMessageInActorCell(cell: ActorCell, envelope: Envelope): Unit = {
+ val cellMetrics = cell.asInstanceOf[ActorCellMetrics]
+ cellMetrics.recorder.map(_.mailboxSize.increment())
+ }
+
+ @Pointcut("execution(* akka.actor.ActorCell.stop()) && this(cell)")
+ def actorStop(cell: ActorCell): Unit = {}
+
+ @After("actorStop(cell)")
+ def afterStop(cell: ActorCell): Unit = {
+ val cellMetrics = cell.asInstanceOf[ActorCellMetrics]
+ cellMetrics.recorder.map { _ ⇒
+ Kamon.metrics.unregister(cellMetrics.entity)
+ }
+
+ // The stop() call can't be captured from the RoutedActorCell, so this cleanup needs to happen here.
+ if (cell.isInstanceOf[RoutedActorCell]) {
+ val routedCellMetrics = cell.asInstanceOf[RoutedActorCellMetrics]
+ routedCellMetrics.routerRecorder.map { _ ⇒
+ Kamon.metrics.unregister(routedCellMetrics.routerEntity)
+ }
+ }
+ }
+
+ @Pointcut("execution(* akka.actor.ActorCell.handleInvokeFailure(..)) && this(cell)")
+ def actorInvokeFailure(cell: ActorCell): Unit = {}
+
+ @Before("actorInvokeFailure(cell)")
+ def beforeInvokeFailure(cell: ActorCell): Unit = {
+ val cellWithMetrics = cell.asInstanceOf[ActorCellMetrics]
+ cellWithMetrics.recorder.map(_.errors.increment())
+
+ // If this actor is behind a router, count the errors for the router as well.
+ val envelope = cell.currentMessage.asInstanceOf[RouterAwareEnvelope]
+ envelope.routerMetricsRecorder.map(_.errors.increment())
+ }
+}
+
+@Aspect
+class RoutedActorCellInstrumentation {
+
+ @Pointcut("execution(akka.routing.RoutedActorCell.new(..)) && this(cell) && args(system, ref, props, dispatcher, routeeProps, supervisor)")
+ def routedActorCellCreation(cell: RoutedActorCell, system: ActorSystem, ref: ActorRef, props: Props, dispatcher: MessageDispatcher, routeeProps: Props, supervisor: ActorRef): Unit = {}
+
+ @After("routedActorCellCreation(cell, system, ref, props, dispatcher, routeeProps, supervisor)")
+ def afterRoutedActorCellCreation(cell: RoutedActorCell, system: ActorSystem, ref: ActorRef, props: Props, dispatcher: MessageDispatcher, routeeProps: Props, supervisor: ActorRef): Unit = {
+ Kamon.metrics.register(RouterMetrics, ref.path.elements.mkString("/")).map { registration ⇒
+ val cellMetrics = cell.asInstanceOf[RoutedActorCellMetrics]
+
+ cellMetrics.routerEntity = registration.entity
+ cellMetrics.routerRecorder = Some(registration.recorder)
+ }
+ }
+
+ @Pointcut("execution(* akka.routing.RoutedActorCell.sendMessage(*)) && this(cell) && args(envelope)")
+ def sendMessageInRouterActorCell(cell: RoutedActorCell, envelope: Envelope) = {}
+
+ @Around("sendMessageInRouterActorCell(cell, envelope)")
+ def aroundSendMessageInRouterActorCell(pjp: ProceedingJoinPoint, cell: RoutedActorCell, envelope: Envelope): Any = {
+ val cellMetrics = cell.asInstanceOf[RoutedActorCellMetrics]
+ val timestampBeforeProcessing = System.nanoTime()
+ val contextAndTimestamp = envelope.asInstanceOf[TimestampedTraceContextAware]
+
+ try {
+ TraceContext.withContext(contextAndTimestamp.traceContext) {
+
+ // The router metrics recorder will only be picked up if the message is sent from a tracked router.
+ RouterAwareEnvelope.dynamicRouterMetricsRecorder.withValue(cellMetrics.routerRecorder) {
+ pjp.proceed()
+ }
+ }
+ } finally {
+ cellMetrics.routerRecorder map { routerRecorder ⇒
+ routerRecorder.routingTime.record(System.nanoTime() - timestampBeforeProcessing)
+ }
+ }
+ }
+}
+
+trait ActorCellMetrics {
+ var entity: Entity = _
+ var recorder: Option[ActorMetrics] = None
+}
+
+trait RoutedActorCellMetrics {
+ var routerEntity: Entity = _
+ var routerRecorder: Option[RouterMetrics] = None
+}
+
+trait RouterAwareEnvelope {
+ def routerMetricsRecorder: Option[RouterMetrics]
+}
+
+object RouterAwareEnvelope {
+ import scala.util.DynamicVariable
+ private[kamon] val dynamicRouterMetricsRecorder = new DynamicVariable[Option[RouterMetrics]](None)
+
+ def default: RouterAwareEnvelope = new RouterAwareEnvelope {
+ val routerMetricsRecorder: Option[RouterMetrics] = dynamicRouterMetricsRecorder.value
+ }
+}
+
+@Aspect
+class MetricsIntoActorCellsMixin {
+
+ @DeclareMixin("akka.actor.ActorCell")
+ def mixinActorCellMetricsToActorCell: ActorCellMetrics = new ActorCellMetrics {}
+
+ @DeclareMixin("akka.routing.RoutedActorCell")
+ def mixinActorCellMetricsToRoutedActorCell: RoutedActorCellMetrics = new RoutedActorCellMetrics {}
+
+}
+
+@Aspect
+class TraceContextIntoEnvelopeMixin {
+
+ @DeclareMixin("akka.dispatch.Envelope")
+ def mixinTraceContextAwareToEnvelope: TimestampedTraceContextAware = TimestampedTraceContextAware.default
+
+ @DeclareMixin("akka.dispatch.Envelope")
+ def mixinRouterAwareToEnvelope: RouterAwareEnvelope = RouterAwareEnvelope.default
+
+ @Pointcut("execution(akka.dispatch.Envelope.new(..)) && this(ctx)")
+ def envelopeCreation(ctx: TimestampedTraceContextAware): Unit = {}
+
+ @After("envelopeCreation(ctx)")
+ def afterEnvelopeCreation(ctx: TimestampedTraceContextAware with RouterAwareEnvelope): Unit = {
+ // Necessary to force the initialization of ContextAware at the moment of creation.
+ ctx.traceContext
+ ctx.routerMetricsRecorder
+ }
+} \ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/instrumentation/akka/ActorLoggingInstrumentation.scala b/kamon-akka/src/main/scala/kamon/akka/instrumentation/ActorLoggingInstrumentation.scala
index 82b8304d..dd998c6b 100644
--- a/kamon-core/src/main/scala/kamon/instrumentation/akka/ActorLoggingInstrumentation.scala
+++ b/kamon-akka/src/main/scala/kamon/akka/instrumentation/ActorLoggingInstrumentation.scala
@@ -14,14 +14,15 @@
* =========================================================================================
*/
-package akka.instrumentation
+package akka.kamon.instrumentation
-import kamon.trace.{ TraceContextAware, TraceRecorder }
+import kamon.trace.logging.MdcKeysSupport
+import kamon.trace.{ TraceContext, TraceContextAware }
import org.aspectj.lang.ProceedingJoinPoint
import org.aspectj.lang.annotation._
@Aspect
-class ActorLoggingInstrumentation {
+class ActorLoggingInstrumentation extends MdcKeysSupport {
@DeclareMixin("akka.event.Logging.LogEvent+")
def mixinTraceContextAwareToLogEvent: TraceContextAware = TraceContextAware.default
@@ -40,8 +41,10 @@ class ActorLoggingInstrumentation {
@Around("withMdcInvocation(logSource, logEvent, logStatement)")
def aroundWithMdcInvocation(pjp: ProceedingJoinPoint, logSource: String, logEvent: TraceContextAware, logStatement: () ⇒ _): Unit = {
- TraceRecorder.withInlineTraceContextReplacement(logEvent.traceContext) {
- pjp.proceed()
+ TraceContext.withContext(logEvent.traceContext) {
+ withMdc {
+ pjp.proceed()
+ }
}
}
}
diff --git a/kamon-core/src/main/scala/kamon/instrumentation/akka/ActorSystemMessageInstrumentation.scala b/kamon-akka/src/main/scala/kamon/akka/instrumentation/ActorSystemMessageInstrumentation.scala
index 7845e90d..0cb4ef13 100644
--- a/kamon-core/src/main/scala/kamon/instrumentation/akka/ActorSystemMessageInstrumentation.scala
+++ b/kamon-akka/src/main/scala/kamon/akka/instrumentation/ActorSystemMessageInstrumentation.scala
@@ -14,10 +14,10 @@
* =========================================================================================
*/
-package akka.instrumentation
+package akka.kamon.instrumentation
import akka.dispatch.sysmsg.EarliestFirstSystemMessageList
-import kamon.trace.{ TraceContextAware, TraceRecorder }
+import kamon.trace.{ TraceContext, TraceContextAware }
import org.aspectj.lang.ProceedingJoinPoint
import org.aspectj.lang.annotation._
@@ -31,7 +31,7 @@ class ActorSystemMessageInstrumentation {
def aroundSystemMessageInvoke(pjp: ProceedingJoinPoint, messages: EarliestFirstSystemMessageList): Any = {
if (messages.nonEmpty) {
val ctx = messages.head.asInstanceOf[TraceContextAware].traceContext
- TraceRecorder.withInlineTraceContextReplacement(ctx)(pjp.proceed())
+ TraceContext.withContext(ctx)(pjp.proceed())
} else pjp.proceed()
}
@@ -73,7 +73,7 @@ class TraceContextIntoRepointableActorRefMixin {
@Around("repointableActorRefCreation(repointableActorRef)")
def afterRepointableActorRefCreation(pjp: ProceedingJoinPoint, repointableActorRef: TraceContextAware): Any = {
- TraceRecorder.withInlineTraceContextReplacement(repointableActorRef.traceContext) {
+ TraceContext.withContext(repointableActorRef.traceContext) {
pjp.proceed()
}
}
diff --git a/kamon-akka/src/main/scala/kamon/akka/instrumentation/AskPatternInstrumentation.scala b/kamon-akka/src/main/scala/kamon/akka/instrumentation/AskPatternInstrumentation.scala
new file mode 100644
index 00000000..e1dcdf32
--- /dev/null
+++ b/kamon-akka/src/main/scala/kamon/akka/instrumentation/AskPatternInstrumentation.scala
@@ -0,0 +1,92 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package akka.kamon.instrumentation
+
+import akka.util.Timeout
+import kamon.Kamon
+import kamon.akka.Akka
+import kamon.trace.{ TraceContext, EmptyTraceContext, TraceContextAware }
+import akka.actor.{ InternalActorRef, ActorSystem, ActorRef }
+import akka.event.Logging.Warning
+import akka.pattern.{ PromiseActorRef, AskTimeoutException }
+import org.aspectj.lang.ProceedingJoinPoint
+import org.aspectj.lang.annotation._
+import org.aspectj.lang.reflect.SourceLocation
+import scala.concurrent.Future
+import scala.compat.Platform.EOL
+import scala.concurrent.duration.FiniteDuration
+
+@Aspect
+class AskPatternInstrumentation {
+
+ import AskPatternInstrumentation._
+
+ @Pointcut("call(* akka.pattern.AskableActorRef$.$qmark$extension(..)) && args(actor, *, timeout)")
+ def askableActorRefAsk(actor: ActorRef, timeout: Timeout): Unit = {}
+
+ @Around("askableActorRefAsk(actor, timeout)")
+ def hookAskTimeoutWarning(pjp: ProceedingJoinPoint, actor: ActorRef, timeout: Timeout): AnyRef =
+ TraceContext.map { ctx ⇒
+ actor match {
+ // the ask pattern will only work for InternalActorRefs that satisfy these conditions.
+ case ref: InternalActorRef if !ref.isTerminated && timeout.duration.length > 0 ⇒
+ val akkaExtension = Kamon.extension(Akka)
+ val future = pjp.proceed().asInstanceOf[Future[AnyRef]]
+ val system = ref.provider.guardian.underlying.system
+
+ val handler = akkaExtension.askPatternTimeoutWarning match {
+ case "off" ⇒ None
+ case "lightweight" ⇒ Some(errorHandler(callInfo = Some(CallInfo(s"${actor.path.name} ?", pjp.getSourceLocation)))(system))
+ case "heavyweight" ⇒ Some(errorHandler(stack = Some(new StackTraceCaptureException))(system))
+ }
+
+ handler.map(future.onFailure(_)(akkaExtension.dispatcher))
+ future
+
+ case _ ⇒ pjp.proceed()
+ }
+
+ } getOrElse (pjp.proceed())
+
+ def errorHandler(callInfo: Option[CallInfo] = None, stack: Option[StackTraceCaptureException] = None)(implicit system: ActorSystem): ErrorHandler = {
+ case e: AskTimeoutException ⇒
+ val message = {
+ if (stack.isDefined) stack.map(s ⇒ s.getStackTrace.drop(3).mkString("", EOL, EOL))
+ else callInfo.map(_.message)
+ }
+ publish(message)
+ }
+
+ def publish(message: Option[String])(implicit system: ActorSystem) = message map { msg ⇒
+ system.eventStream.publish(Warning("AskPatternTracing", classOf[AskPatternInstrumentation],
+ s"Timeout triggered for ask pattern registered at: $msg"))
+ }
+}
+
+object AskPatternInstrumentation {
+ type ErrorHandler = PartialFunction[Throwable, Unit]
+
+ class StackTraceCaptureException extends Throwable
+
+ case class CallInfo(name: String, sourceLocation: SourceLocation) {
+ def message: String = {
+ def locationInfo: String = Option(sourceLocation).map(location ⇒ s"${location.getFileName}:${location.getLine}").getOrElse("<unknown position>")
+ def line: String = s"$name @ $locationInfo"
+ s"$line"
+ }
+ }
+} \ No newline at end of file
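
The aspect above decides, per the askPatternTimeoutWarning setting, whether to attach a lightweight (call-site) or heavyweight (stack-trace) failure handler to the Future returned by ?. A hedged sketch of the underlying mechanism, outside of AspectJ; askWithTimeoutWarning is a hypothetical helper, not a Kamon API:

    import akka.actor.{ ActorRef, ActorSystem }
    import akka.pattern.{ ask, AskTimeoutException }
    import akka.util.Timeout
    import scala.concurrent.{ ExecutionContext, Future }

    // Hypothetical helper illustrating the technique: register a failure
    // handler that publishes a warning when the ask future times out.
    def askWithTimeoutWarning(target: ActorRef, msg: Any)(
      implicit system: ActorSystem, timeout: Timeout, ec: ExecutionContext): Future[Any] = {
      val future = target ? msg
      future.onFailure {
        case _: AskTimeoutException ⇒
          system.log.warning("Timeout triggered for ask pattern registered at: {}", target.path)
      }
      future
    }
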
diff --git a/kamon-akka/src/main/scala/kamon/akka/instrumentation/DispatcherInstrumentation.scala b/kamon-akka/src/main/scala/kamon/akka/instrumentation/DispatcherInstrumentation.scala
new file mode 100644
index 00000000..7b15c443
--- /dev/null
+++ b/kamon-akka/src/main/scala/kamon/akka/instrumentation/DispatcherInstrumentation.scala
@@ -0,0 +1,169 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package akka.kamon.instrumentation
+
+import java.util.concurrent.{ ExecutorService, ThreadPoolExecutor }
+
+import akka.actor.{ ActorSystem, ActorSystemImpl }
+import akka.dispatch.ForkJoinExecutorConfigurator.AkkaForkJoinPool
+import akka.dispatch._
+import akka.kamon.instrumentation.LookupDataAware.LookupData
+import kamon.Kamon
+import kamon.akka.{ AkkaDispatcherMetrics, ThreadPoolExecutorDispatcherMetrics, ForkJoinPoolDispatcherMetrics }
+import kamon.metric.Entity
+import org.aspectj.lang.ProceedingJoinPoint
+import org.aspectj.lang.annotation._
+
+@Aspect
+class DispatcherInstrumentation {
+
+ @Pointcut("execution(* akka.actor.ActorSystemImpl.start(..)) && this(system)")
+ def actorSystemInitialization(system: ActorSystemImpl): Unit = {}
+
+ @Before("actorSystemInitialization(system)")
+ def afterActorSystemInitialization(system: ActorSystemImpl): Unit = {
+ system.dispatchers.asInstanceOf[ActorSystemAware].actorSystem = system
+
+ // The default dispatcher for the actor system is looked up in the ActorSystemImpl's initialization code and we
+ // can't get the Metrics extension there since the ActorSystem is not yet fully constructed. To work around that
+ // we manually select and register the default dispatcher with the Metrics extension. All other dispatchers
+ // will be registered by the instrumentation below.
+
+ // Yes, reflection sucks, but this piece of code is only executed once on ActorSystem's startup.
+ val defaultDispatcher = system.dispatcher
+ val executorServiceDelegateField = defaultDispatcher.getClass.getDeclaredField("executorServiceDelegate")
+ executorServiceDelegateField.setAccessible(true)
+
+ val lazyExecutorServiceDelegate = executorServiceDelegateField.get(defaultDispatcher)
+ val executorField = lazyExecutorServiceDelegate.getClass.getMethod("executor")
+ executorField.setAccessible(true)
+
+ val defaultDispatcherExecutor = executorField.invoke(lazyExecutorServiceDelegate).asInstanceOf[ExecutorService]
+ registerDispatcher(Dispatchers.DefaultDispatcherId, defaultDispatcherExecutor, system)
+ }
+
+ private def registerDispatcher(dispatcherName: String, executorService: ExecutorService, system: ActorSystem): Unit =
+ executorService match {
+ case fjp: AkkaForkJoinPool ⇒
+ Kamon.metrics.register(ForkJoinPoolDispatcherMetrics.factory(fjp), dispatcherName)
+
+ case tpe: ThreadPoolExecutor ⇒
+ Kamon.metrics.register(ThreadPoolExecutorDispatcherMetrics.factory(tpe), dispatcherName)
+
+ case others ⇒ // Currently not interested in other kinds of dispatchers.
+ }
+
+ @Pointcut("execution(* akka.dispatch.Dispatchers.lookup(..)) && this(dispatchers) && args(dispatcherName)")
+ def dispatchersLookup(dispatchers: ActorSystemAware, dispatcherName: String) = {}
+
+ @Around("dispatchersLookup(dispatchers, dispatcherName)")
+ def aroundDispatchersLookup(pjp: ProceedingJoinPoint, dispatchers: ActorSystemAware, dispatcherName: String): Any =
+ LookupDataAware.withLookupData(LookupData(dispatcherName, dispatchers.actorSystem)) {
+ pjp.proceed()
+ }
+
+ @Pointcut("initialization(akka.dispatch.ExecutorServiceFactory.new(..)) && target(factory)")
+ def executorServiceFactoryInitialization(factory: LookupDataAware): Unit = {}
+
+ @After("executorServiceFactoryInitialization(factory)")
+ def afterExecutorServiceFactoryInitialization(factory: LookupDataAware): Unit =
+ factory.lookupData = LookupDataAware.currentLookupData
+
+ @Pointcut("execution(* akka.dispatch.ExecutorServiceFactory+.createExecutorService()) && this(factory) && !cflow(execution(* akka.dispatch.Dispatcher.shutdown()))")
+ def createExecutorService(factory: LookupDataAware): Unit = {}
+
+ @AfterReturning(pointcut = "createExecutorService(factory)", returning = "executorService")
+ def afterCreateExecutorService(factory: LookupDataAware, executorService: ExecutorService): Unit = {
+ val lookupData = factory.lookupData
+
+ // lookupData.actorSystem will be null only during the first lookup of the default dispatcher during the
+ // ActorSystemImpl's initialization.
+ if (lookupData.actorSystem != null)
+ registerDispatcher(lookupData.dispatcherName, executorService, lookupData.actorSystem)
+ }
+
+ @Pointcut("initialization(akka.dispatch.Dispatcher.LazyExecutorServiceDelegate.new(..)) && this(lazyExecutor)")
+ def lazyExecutorInitialization(lazyExecutor: LookupDataAware): Unit = {}
+
+ @After("lazyExecutorInitialization(lazyExecutor)")
+ def afterLazyExecutorInitialization(lazyExecutor: LookupDataAware): Unit =
+ lazyExecutor.lookupData = LookupDataAware.currentLookupData
+
+ @Pointcut("execution(* akka.dispatch.Dispatcher.LazyExecutorServiceDelegate.copy()) && this(lazyExecutor)")
+ def lazyExecutorCopy(lazyExecutor: LookupDataAware): Unit = {}
+
+ @Around("lazyExecutorCopy(lazyExecutor)")
+ def aroundLazyExecutorCopy(pjp: ProceedingJoinPoint, lazyExecutor: LookupDataAware): Any =
+ LookupDataAware.withLookupData(lazyExecutor.lookupData) {
+ pjp.proceed()
+ }
+
+ @Pointcut("execution(* akka.dispatch.Dispatcher.LazyExecutorServiceDelegate.shutdown()) && this(lazyExecutor)")
+ def lazyExecutorShutdown(lazyExecutor: LookupDataAware): Unit = {}
+
+ @After("lazyExecutorShutdown(lazyExecutor)")
+ def afterLazyExecutorShutdown(lazyExecutor: LookupDataAware): Unit = {
+ import lazyExecutor.lookupData
+
+ if (lookupData.actorSystem != null)
+ Kamon.metrics.unregister(Entity(lookupData.dispatcherName, AkkaDispatcherMetrics.Category))
+ }
+
+}
+
+@Aspect
+class DispatcherMetricCollectionInfoIntoDispatcherMixin {
+
+ @DeclareMixin("akka.dispatch.Dispatchers")
+ def mixinActorSystemAwareToDispatchers: ActorSystemAware = ActorSystemAware()
+
+ @DeclareMixin("akka.dispatch.Dispatcher.LazyExecutorServiceDelegate")
+ def mixinLookupDataAwareToExecutors: LookupDataAware = LookupDataAware()
+
+ @DeclareMixin("akka.dispatch.ExecutorServiceFactory+")
+ def mixinActorSystemAwareToDispatcher: LookupDataAware = LookupDataAware()
+}
+
+trait ActorSystemAware {
+ @volatile var actorSystem: ActorSystem = _
+}
+
+object ActorSystemAware {
+ def apply(): ActorSystemAware = new ActorSystemAware {}
+}
+
+trait LookupDataAware {
+ @volatile var lookupData: LookupData = _
+}
+
+object LookupDataAware {
+ case class LookupData(dispatcherName: String, actorSystem: ActorSystem)
+
+ private val _currentDispatcherLookupData = new ThreadLocal[LookupData]
+
+ def apply() = new LookupDataAware {}
+
+ def currentLookupData: LookupData = _currentDispatcherLookupData.get()
+
+ def withLookupData[T](lookupData: LookupData)(thunk: ⇒ T): T = {
+ _currentDispatcherLookupData.set(lookupData)
+ val result = thunk
+ _currentDispatcherLookupData.remove()
+
+ result
+ }
+} \ No newline at end of file
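
LookupDataAware.withLookupData is the hand-off that lets afterExecutorServiceFactoryInitialization see which dispatcher is being looked up: the lookup data is parked in a ThreadLocal for the duration of Dispatchers.lookup and read back by advice deeper in the call stack, without being passed as a parameter. A generic sketch of the same scoping idiom; Scoped is a hypothetical name, and the try/finally is an assumed refinement so the slot is also cleared when the thunk throws (the patch's version clears it only on the success path):

    // Generic ThreadLocal scoping helper in the style of withLookupData.
    final class Scoped[T] {
      private val slot = new ThreadLocal[T]
      def current: T = slot.get()
      def withValue[R](value: T)(thunk: ⇒ R): R = {
        slot.set(value)
        try thunk
        finally slot.remove() // cleared even if thunk throws
      }
    }
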
diff --git a/kamon-akka/src/test/scala/kamon/akka/ActorMetricsSpec.scala b/kamon-akka/src/test/scala/kamon/akka/ActorMetricsSpec.scala
new file mode 100644
index 00000000..0d8d41e3
--- /dev/null
+++ b/kamon-akka/src/test/scala/kamon/akka/ActorMetricsSpec.scala
@@ -0,0 +1,215 @@
+/* =========================================================================================
+ * Copyright © 2013 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.akka
+
+import java.nio.LongBuffer
+
+import akka.actor._
+import akka.testkit.TestProbe
+import com.typesafe.config.ConfigFactory
+import kamon.Kamon
+import kamon.akka.ActorMetricsTestActor._
+import kamon.metric.EntitySnapshot
+import kamon.metric.instrument.CollectionContext
+import kamon.testkit.BaseKamonSpec
+
+import scala.concurrent.duration._
+
+class ActorMetricsSpec extends BaseKamonSpec("actor-metrics-spec") {
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon.metric {
+ | tick-interval = 1 hour
+ | default-collection-context-buffer-size = 10
+ |
+ | filters {
+ | akka-actor {
+ | includes = [ "user/tracked-*", "user/measuring-*", "user/clean-after-collect", "user/stop" ]
+ | excludes = [ "user/tracked-explicitly-excluded", "user/non-tracked-actor" ]
+ | }
+ | }
+ |
+ | instrument-settings {
+ | akka-actor.mailbox-size.refresh-interval = 1 hour
+ | }
+ |}
+ |
+ |akka.loglevel = OFF
+ |
+ """.stripMargin)
+
+ "the Kamon actor metrics" should {
+ "respect the configured include and exclude filters" in new ActorMetricsFixtures {
+ val trackedActor = createTestActor("tracked-actor")
+ actorMetricsRecorderOf(trackedActor) should not be empty
+
+ val nonTrackedActor = createTestActor("non-tracked-actor")
+ actorMetricsRecorderOf(nonTrackedActor) shouldBe empty
+
+ val trackedButExplicitlyExcluded = createTestActor("tracked-explicitly-excluded")
+ actorMetricsRecorderOf(trackedButExplicitlyExcluded) shouldBe empty
+ }
+
+ "reset all recording instruments after taking a snapshot" in new ActorMetricsFixtures {
+ val trackedActor = createTestActor("clean-after-collect")
+
+ for (_ ← 1 to 100) {
+ for (i ← 1 to 100) {
+ trackedActor ! Discard
+ }
+ trackedActor ! Fail
+ trackedActor ! Ping
+ expectMsg(Pong)
+
+ val firstSnapshot = collectMetricsOf(trackedActor).get
+ firstSnapshot.counter("errors").get.count should be(1L)
+ firstSnapshot.minMaxCounter("mailbox-size").get.numberOfMeasurements should be > 0L
+ firstSnapshot.histogram("processing-time").get.numberOfMeasurements should be(102L) // 102 examples
+ firstSnapshot.histogram("time-in-mailbox").get.numberOfMeasurements should be(102L) // 102 examples
+
+ val secondSnapshot = collectMetricsOf(trackedActor).get // Ensure that the recorders are clean
+ secondSnapshot.counter("errors").get.count should be(0L)
+ secondSnapshot.minMaxCounter("mailbox-size").get.numberOfMeasurements should be(3L) // min, max and current
+ secondSnapshot.histogram("processing-time").get.numberOfMeasurements should be(0L)
+ secondSnapshot.histogram("time-in-mailbox").get.numberOfMeasurements should be(0L)
+ }
+ }
+
+ "record the processing-time of the receive function" in new ActorMetricsFixtures {
+ val trackedActor = createTestActor("measuring-processing-time")
+
+ trackedActor ! TrackTimings(sleep = Some(100 millis))
+ val timings = expectMsgType[TrackedTimings]
+ val snapshot = collectMetricsOf(trackedActor).get
+
+ snapshot.histogram("processing-time").get.numberOfMeasurements should be(1L)
+ snapshot.histogram("processing-time").get.recordsIterator.next().count should be(1L)
+ snapshot.histogram("processing-time").get.recordsIterator.next().level should be(timings.approximateProcessingTime +- 10.millis.toNanos)
+ }
+
+ "record the number of errors" in new ActorMetricsFixtures {
+ val trackedActor = createTestActor("measuring-errors")
+
+ for (i ← 1 to 10) { trackedActor ! Fail }
+ trackedActor ! Ping
+ expectMsg(Pong)
+ val snapshot = collectMetricsOf(trackedActor).get
+
+ snapshot.counter("errors").get.count should be(10)
+ }
+
+ "record the mailbox-size" in new ActorMetricsFixtures {
+ val trackedActor = createTestActor("measuring-mailbox-size")
+
+ trackedActor ! TrackTimings(sleep = Some(100 millis))
+ for (i ← 1 to 10) {
+ trackedActor ! Discard
+ }
+ trackedActor ! Ping
+
+ val timings = expectMsgType[TrackedTimings]
+ expectMsg(Pong)
+ val snapshot = collectMetricsOf(trackedActor).get
+
+ snapshot.minMaxCounter("mailbox-size").get.min should be(0L)
+ snapshot.minMaxCounter("mailbox-size").get.max should be(11L +- 1L)
+ }
+
+ "record the time-in-mailbox" in new ActorMetricsFixtures {
+ val trackedActor = createTestActor("measuring-time-in-mailbox")
+
+ trackedActor ! TrackTimings(sleep = Some(100 millis))
+ val timings = expectMsgType[TrackedTimings]
+ val snapshot = collectMetricsOf(trackedActor).get
+
+ snapshot.histogram("time-in-mailbox").get.numberOfMeasurements should be(1L)
+ snapshot.histogram("time-in-mailbox").get.recordsIterator.next().count should be(1L)
+ snapshot.histogram("time-in-mailbox").get.recordsIterator.next().level should be(timings.approximateTimeInMailbox +- 10.millis.toNanos)
+ }
+
+ "clean up the associated recorder when the actor is stopped" in new ActorMetricsFixtures {
+ val trackedActor = createTestActor("stop")
+ val firstRecorder = actorMetricsRecorderOf(trackedActor).get
+
+ // Killing the actor should remove its ActorMetrics, and registering it again below should create a new one.
+ val deathWatcher = TestProbe()
+ deathWatcher.watch(trackedActor)
+ trackedActor ! PoisonPill
+ deathWatcher.expectTerminated(trackedActor)
+
+ actorMetricsRecorderOf(trackedActor).get shouldNot be theSameInstanceAs (firstRecorder)
+ }
+ }
+
+ trait ActorMetricsFixtures {
+ val collectionContext = new CollectionContext {
+ val buffer: LongBuffer = LongBuffer.allocate(10000)
+ }
+
+ def actorRecorderName(ref: ActorRef): String = ref.path.elements.mkString("/")
+
+ def actorMetricsRecorderOf(ref: ActorRef): Option[ActorMetrics] =
+ Kamon.metrics.register(ActorMetrics, actorRecorderName(ref)).map(_.recorder)
+
+ def collectMetricsOf(ref: ActorRef): Option[EntitySnapshot] = {
+ Thread.sleep(5) // Just in case the test advances a bit faster than the actor being tested.
+ actorMetricsRecorderOf(ref).map(_.collect(collectionContext))
+ }
+
+ def createTestActor(name: String): ActorRef = {
+ val actor = system.actorOf(Props[ActorMetricsTestActor], name)
+ val initialiseListener = TestProbe()
+
+ // Ensure that the actor has been created before returning.
+ actor.tell(Ping, initialiseListener.ref)
+ initialiseListener.expectMsg(Pong)
+
+ // Clean up all the metric recording instruments:
+ collectMetricsOf(actor)
+
+ actor
+ }
+ }
+}
+
+class ActorMetricsTestActor extends Actor {
+ def receive = {
+ case Discard ⇒
+ case Fail ⇒ throw new ArithmeticException("Division by zero.")
+ case Ping ⇒ sender ! Pong
+ case TrackTimings(sendTimestamp, sleep) ⇒ {
+ val dequeueTimestamp = System.nanoTime()
+ sleep.map(s ⇒ Thread.sleep(s.toMillis))
+ val afterReceiveTimestamp = System.nanoTime()
+
+ sender ! TrackedTimings(sendTimestamp, dequeueTimestamp, afterReceiveTimestamp)
+ }
+ }
+}
+
+object ActorMetricsTestActor {
+ case object Ping
+ case object Pong
+ case object Fail
+ case object Discard
+
+ case class TrackTimings(sendTimestamp: Long = System.nanoTime(), sleep: Option[Duration] = None)
+ case class TrackedTimings(sendTimestamp: Long, dequeueTimestamp: Long, afterReceiveTimestamp: Long) {
+ def approximateTimeInMailbox: Long = dequeueTimestamp - sendTimestamp
+ def approximateProcessingTime: Long = afterReceiveTimestamp - dequeueTimestamp
+ }
+}
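
The fixtures above encode the register-collect-assert cycle, and taking a snapshot also resets the instruments — exactly what the second snapshot in the "reset all recording instruments" test verifies. A condensed sketch of the cycle, reusing the spec's own names:

    val collectionContext = new CollectionContext {
      val buffer: LongBuffer = LongBuffer.allocate(10000)
    }
    val recorder = Kamon.metrics.register(ActorMetrics, "user/tracked-actor").map(_.recorder).get
    val first = recorder.collect(collectionContext)  // drains the values accumulated so far
    val second = recorder.collect(collectionContext) // observes freshly reset instruments
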
diff --git a/kamon-akka/src/test/scala/kamon/akka/DispatcherMetricsSpec.scala b/kamon-akka/src/test/scala/kamon/akka/DispatcherMetricsSpec.scala
new file mode 100644
index 00000000..dd5cfa45
--- /dev/null
+++ b/kamon-akka/src/test/scala/kamon/akka/DispatcherMetricsSpec.scala
@@ -0,0 +1,207 @@
+/* =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.akka
+
+import akka.actor.ActorRef
+import akka.dispatch.MessageDispatcher
+import akka.testkit.TestProbe
+import com.typesafe.config.ConfigFactory
+import kamon.Kamon
+import kamon.metric.{ EntityRecorder, EntitySnapshot }
+import kamon.testkit.BaseKamonSpec
+
+import scala.concurrent.duration._
+import scala.concurrent.{ Await, Future }
+
+class DispatcherMetricsSpec extends BaseKamonSpec("dispatcher-metrics-spec") {
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon.metric {
+ | tick-interval = 1 hour
+ | default-collection-context-buffer-size = 10
+ |
+ | filters = {
+ | akka-dispatcher {
+ | includes = [ "*" ]
+ | excludes = [ "explicitly-excluded" ]
+ | }
+ | }
+ |
+ | default-instrument-settings {
+ | gauge.refresh-interval = 1 hour
+ | min-max-counter.refresh-interval = 1 hour
+ | }
+ |}
+ |
+ |explicitly-excluded {
+ | type = "Dispatcher"
+ | executor = "fork-join-executor"
+ |}
+ |
+ |tracked-fjp {
+ | type = "Dispatcher"
+ | executor = "fork-join-executor"
+ |
+ | fork-join-executor {
+ | parallelism-min = 8
+ | parallelism-factor = 100.0
+ | parallelism-max = 22
+ | }
+ |}
+ |
+ |tracked-tpe {
+ | type = "Dispatcher"
+ | executor = "thread-pool-executor"
+ |
+ | thread-pool-executor {
+ | core-pool-size-min = 7
+ | core-pool-size-factor = 100.0
+ | max-pool-size-factor = 100.0
+ | max-pool-size-max = 21
+ | }
+ |}
+ |
+ """.stripMargin)
+
+ "the Kamon dispatcher metrics" should {
+ "respect the configured include and exclude filters" in {
+ val defaultDispatcher = forceInit(system.dispatchers.lookup("akka.actor.default-dispatcher"))
+ val fjpDispatcher = forceInit(system.dispatchers.lookup("tracked-fjp"))
+ val tpeDispatcher = forceInit(system.dispatchers.lookup("tracked-tpe"))
+ val excludedDispatcher = forceInit(system.dispatchers.lookup("explicitly-excluded"))
+
+ findDispatcherRecorder(defaultDispatcher) shouldNot be(empty)
+ findDispatcherRecorder(fjpDispatcher) shouldNot be(empty)
+ findDispatcherRecorder(tpeDispatcher) shouldNot be(empty)
+ findDispatcherRecorder(excludedDispatcher) should be(empty)
+ }
+
+ "record metrics for a dispatcher with thread-pool-executor" in {
+ implicit val tpeDispatcher = system.dispatchers.lookup("tracked-tpe")
+ refreshDispatcherInstruments(tpeDispatcher)
+ collectDispatcherMetrics(tpeDispatcher)
+
+ Await.result({
+ Future.sequence {
+ for (_ ← 1 to 100) yield submit(tpeDispatcher)
+ }
+ }, 5 seconds)
+
+ refreshDispatcherInstruments(tpeDispatcher)
+ val snapshot = collectDispatcherMetrics(tpeDispatcher)
+
+ snapshot.gauge("active-threads") should not be empty
+ snapshot.gauge("pool-size").get.min should be >= 7L
+ snapshot.gauge("pool-size").get.max should be <= 21L
+ snapshot.gauge("max-pool-size").get.max should be(21)
+ snapshot.gauge("core-pool-size").get.max should be(21)
+ snapshot.gauge("processed-tasks").get.max should be(102L +- 5L)
+
+ // The processed tasks should be reset to 0 if no more tasks are submitted.
+ val secondSnapshot = collectDispatcherMetrics(tpeDispatcher)
+ secondSnapshot.gauge("processed-tasks").get.max should be(0)
+ }
+
+ "record metrics for a dispatcher with fork-join-executor" in {
+ implicit val fjpDispatcher = system.dispatchers.lookup("tracked-fjp")
+ collectDispatcherMetrics(fjpDispatcher)
+
+ Await.result({
+ Future.sequence {
+ for (_ ← 1 to 100) yield submit(fjpDispatcher)
+ }
+ }, 5 seconds)
+
+ refreshDispatcherInstruments(fjpDispatcher)
+ val snapshot = collectDispatcherMetrics(fjpDispatcher)
+
+ snapshot.minMaxCounter("parallelism").get.max should be(22)
+ snapshot.gauge("pool-size").get.min should be >= 0L
+ snapshot.gauge("pool-size").get.max should be <= 22L
+ snapshot.gauge("active-threads").get.max should be >= 0L
+ snapshot.gauge("running-threads").get.max should be >= 0L
+ snapshot.gauge("queued-task-count").get.max should be(0)
+
+ }
+
+ "clean up the metrics recorders after a dispatcher is shut down" in {
+ implicit val tpeDispatcher = system.dispatchers.lookup("tracked-tpe")
+ implicit val fjpDispatcher = system.dispatchers.lookup("tracked-fjp")
+
+ findDispatcherRecorder(fjpDispatcher) shouldNot be(empty)
+ findDispatcherRecorder(tpeDispatcher) shouldNot be(empty)
+
+ shutdownDispatcher(tpeDispatcher)
+ shutdownDispatcher(fjpDispatcher)
+
+ findDispatcherRecorder(fjpDispatcher) should be(empty)
+ findDispatcherRecorder(tpeDispatcher) should be(empty)
+ }
+
+ }
+
+ def actorRecorderName(ref: ActorRef): String = ref.path.elements.mkString("/")
+
+ def findDispatcherRecorder(dispatcher: MessageDispatcher): Option[EntityRecorder] =
+ Kamon.metrics.find(dispatcher.id, "akka-dispatcher")
+
+ def collectDispatcherMetrics(dispatcher: MessageDispatcher): EntitySnapshot =
+ findDispatcherRecorder(dispatcher).map(_.collect(collectionContext)).get
+
+ def refreshDispatcherInstruments(dispatcher: MessageDispatcher): Unit = {
+ findDispatcherRecorder(dispatcher) match {
+ case Some(tpe: ThreadPoolExecutorDispatcherMetrics) ⇒
+ tpe.processedTasks.refreshValue()
+ tpe.activeThreads.refreshValue()
+ tpe.maxPoolSize.refreshValue()
+ tpe.poolSize.refreshValue()
+ tpe.corePoolSize.refreshValue()
+
+ case Some(fjp: ForkJoinPoolDispatcherMetrics) ⇒
+ fjp.activeThreads.refreshValue()
+ fjp.poolSize.refreshValue()
+ fjp.queuedTaskCount.refreshValue()
+ fjp.paralellism.refreshValues()
+ fjp.runningThreads.refreshValue()
+
+ case other ⇒
+ }
+ }
+
+ def forceInit(dispatcher: MessageDispatcher): MessageDispatcher = {
+ val listener = TestProbe()
+ Future {
+ listener.ref ! "init done"
+ }(dispatcher)
+ listener.expectMsg("init done")
+
+ dispatcher
+ }
+
+ def submit(dispatcher: MessageDispatcher): Future[String] = Future {
+ "hello"
+ }(dispatcher)
+
+ def shutdownDispatcher(dispatcher: MessageDispatcher): Unit = {
+ val shutdownMethod = dispatcher.getClass.getDeclaredMethod("shutdown")
+ shutdownMethod.setAccessible(true)
+ shutdownMethod.invoke(dispatcher)
+ }
+
+ override protected def afterAll(): Unit = system.shutdown()
+}
+
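forceInit and submit above rely on a MessageDispatcher being an ExecutionContext: scheduling any Future on it materializes the lazy executor delegate, which in turn fires the createExecutorService advice and registers the dispatcher's metrics. A minimal sketch:

    import scala.concurrent.Future

    val dispatcher = system.dispatchers.lookup("tracked-fjp")
    // Running any task on the dispatcher forces its underlying executor to be created.
    Future { "warm-up" }(dispatcher)
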
diff --git a/kamon-akka/src/test/scala/kamon/akka/RouterMetricsSpec.scala b/kamon-akka/src/test/scala/kamon/akka/RouterMetricsSpec.scala
new file mode 100644
index 00000000..c4c1d9ad
--- /dev/null
+++ b/kamon-akka/src/test/scala/kamon/akka/RouterMetricsSpec.scala
@@ -0,0 +1,184 @@
+/* =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.akka
+
+import java.nio.LongBuffer
+
+import akka.actor._
+import akka.routing._
+import akka.testkit.TestProbe
+import com.typesafe.config.ConfigFactory
+import kamon.Kamon
+import kamon.akka.RouterMetricsTestActor._
+import kamon.metric.EntitySnapshot
+import kamon.metric.instrument.CollectionContext
+import kamon.testkit.BaseKamonSpec
+
+import scala.concurrent.duration._
+
+class RouterMetricsSpec extends BaseKamonSpec("router-metrics-spec") {
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon.metric {
+ | tick-interval = 1 hour
+ | default-collection-context-buffer-size = 10
+ |
+ | filters = {
+ | akka-router {
+ | includes = [ "user/tracked-*", "user/measuring-*", "user/stop-*" ]
+ | excludes = [ "user/tracked-explicitly-excluded-*"]
+ | }
+ | }
+ |}
+ |
+ |akka.loglevel = OFF
+ |
+ """.stripMargin)
+
+ "the Kamon router metrics" should {
+ "respect the configured include and exclude filters" in new RouterMetricsFixtures {
+ createTestRouter("tracked-router")
+ createTestRouter("non-tracked-router")
+ createTestRouter("tracked-explicitly-excluded-router")
+
+ routerMetricsRecorderOf("user/tracked-router") should not be empty
+ routerMetricsRecorderOf("user/non-tracked-router") shouldBe empty
+ routerMetricsRecorderOf("user/tracked-explicitly-excluded-router") shouldBe empty
+ }
+
+ "record the routing-time of the receive function for routers" in new RouterMetricsFixtures {
+ val listener = TestProbe()
+ val router = createTestRouter("measuring-routing-time-in-router")
+
+ router.tell(Ping, listener.ref)
+ listener.expectMsg(Pong)
+ val routerSnapshot = collectMetricsOf("user/measuring-routing-time-in-router").get
+
+ routerSnapshot.histogram("routing-time").get.numberOfMeasurements should be(1L)
+ }
+
+ "record the processing-time of the receive function for routers" in new RouterMetricsFixtures {
+ val timingsListener = TestProbe()
+ val router = createTestRouter("measuring-processing-time-in-router")
+
+ router.tell(RouterTrackTimings(sleep = Some(1 second)), timingsListener.ref)
+ val timings = timingsListener.expectMsgType[RouterTrackedTimings]
+ val routerSnapshot = collectMetricsOf("user/measuring-processing-time-in-router").get
+
+ routerSnapshot.histogram("processing-time").get.numberOfMeasurements should be(1L)
+ routerSnapshot.histogram("processing-time").get.recordsIterator.next().count should be(1L)
+ routerSnapshot.histogram("processing-time").get.recordsIterator.next().level should be(timings.approximateProcessingTime +- 10.millis.toNanos)
+ }
+
+ "record the number of errors for routers" in new RouterMetricsFixtures {
+ val listener = TestProbe()
+ val router = createTestRouter("measuring-errors-in-router")
+
+ for (i ← 1 to 10) {
+ router.tell(Fail, listener.ref)
+ }
+
+ router.tell(Ping, listener.ref)
+ listener.expectMsg(Pong)
+
+ val routerSnapshot = collectMetricsOf("user/measuring-errors-in-router").get
+ routerSnapshot.counter("errors").get.count should be(10L)
+ }
+
+ "record the time-in-mailbox for routers" in new RouterMetricsFixtures {
+ val timingsListener = TestProbe()
+ val router = createTestRouter("measuring-time-in-mailbox-in-router")
+
+ router.tell(RouterTrackTimings(sleep = Some(1 second)), timingsListener.ref)
+ val timings = timingsListener.expectMsgType[RouterTrackedTimings]
+ val routerSnapshot = collectMetricsOf("user/measuring-time-in-mailbox-in-router").get
+
+ routerSnapshot.histogram("time-in-mailbox").get.numberOfMeasurements should be(1L)
+ routerSnapshot.histogram("time-in-mailbox").get.recordsIterator.next().count should be(1L)
+ routerSnapshot.histogram("time-in-mailbox").get.recordsIterator.next().level should be(timings.approximateTimeInMailbox +- 10.millis.toNanos)
+ }
+
+ "clean up the associated recorder when the router is stopped" in new RouterMetricsFixtures {
+ val trackedRouter = createTestRouter("stop-in-router")
+ val firstRecorder = routerMetricsRecorderOf("user/stop-in-router").get
+
+ // Killing the router should remove its RouterMetrics, and registering it again below should create a new one.
+ val deathWatcher = TestProbe()
+ deathWatcher.watch(trackedRouter)
+ trackedRouter ! PoisonPill
+ deathWatcher.expectTerminated(trackedRouter)
+
+ routerMetricsRecorderOf("user/stop-in-router").get shouldNot be theSameInstanceAs (firstRecorder)
+ }
+ }
+
+ trait RouterMetricsFixtures {
+ val collectionContext = new CollectionContext {
+ val buffer: LongBuffer = LongBuffer.allocate(10000)
+ }
+
+ def routerMetricsRecorderOf(routerName: String): Option[RouterMetrics] =
+ Kamon.metrics.register(RouterMetrics, routerName).map(_.recorder)
+
+ def collectMetricsOf(routerName: String): Option[EntitySnapshot] = {
+ Thread.sleep(5) // Just in case the test advances a bit faster than the actor being tested.
+ routerMetricsRecorderOf(routerName).map(_.collect(collectionContext))
+ }
+
+ def createTestRouter(routerName: String): ActorRef = {
+ val router = system.actorOf(Props[RouterMetricsTestActor].withRouter(RoundRobinRouter(nrOfInstances = 5)), routerName)
+ val initialiseListener = TestProbe()
+
+ // Ensure that the router has been created before returning.
+ router.tell(Ping, initialiseListener.ref)
+ initialiseListener.expectMsg(Pong)
+
+ // Clean up all the metric recording instruments:
+ collectMetricsOf("user/" + routerName)
+
+ router
+ }
+ }
+}
+
+class RouterMetricsTestActor extends Actor {
+ def receive = {
+ case Discard ⇒
+ case Fail ⇒ throw new ArithmeticException("Division by zero.")
+ case Ping ⇒ sender ! Pong
+ case RouterTrackTimings(sendTimestamp, sleep) ⇒ {
+ val dequeueTimestamp = System.nanoTime()
+ sleep.map(s ⇒ Thread.sleep(s.toMillis))
+ val afterReceiveTimestamp = System.nanoTime()
+
+ sender ! RouterTrackedTimings(sendTimestamp, dequeueTimestamp, afterReceiveTimestamp)
+ }
+ }
+}
+
+object RouterMetricsTestActor {
+ case object Ping
+ case object Pong
+ case object Fail
+ case object Discard
+
+ case class RouterTrackTimings(sendTimestamp: Long = System.nanoTime(), sleep: Option[Duration] = None)
+ case class RouterTrackedTimings(sendTimestamp: Long, dequeueTimestamp: Long, afterReceiveTimestamp: Long) {
+ def approximateTimeInMailbox: Long = dequeueTimestamp - sendTimestamp
+ def approximateProcessingTime: Long = afterReceiveTimestamp - dequeueTimestamp
+ }
+}
diff --git a/kamon-core/src/test/scala/kamon/instrumentation/akka/ActorCellInstrumentationSpec.scala b/kamon-akka/src/test/scala/kamon/akka/instrumentation/ActorCellInstrumentationSpec.scala
index ee3857c3..593a7baa 100644
--- a/kamon-core/src/test/scala/kamon/instrumentation/akka/ActorCellInstrumentationSpec.scala
+++ b/kamon-akka/src/test/scala/kamon/akka/instrumentation/ActorCellInstrumentationSpec.scala
@@ -13,37 +13,34 @@
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================== */
-package kamon.trace.instrumentation
+package kamon.instrumentation.akka
-import org.scalatest.WordSpecLike
-import akka.actor.{ Actor, Props, ActorSystem }
-
-import akka.testkit.{ ImplicitSender, TestKit }
-import kamon.trace.TraceRecorder
-import akka.pattern.{ pipe, ask }
+import akka.actor.{ Actor, Props }
+import akka.pattern.{ ask, pipe }
+import akka.routing._
import akka.util.Timeout
-import scala.concurrent.duration._
-import akka.routing.{ RoundRobinRouter }
+import kamon.testkit.BaseKamonSpec
+import kamon.trace.TraceContext
-class ActorCellInstrumentationSpec extends TestKit(ActorSystem("actor-cell-instrumentation-spec")) with WordSpecLike
- with ImplicitSender {
+import scala.concurrent.duration._
- implicit val executionContext = system.dispatcher
+class ActorCellInstrumentationSpec extends BaseKamonSpec("actor-cell-instrumentation-spec") {
+ implicit lazy val executionContext = system.dispatcher
"the message passing instrumentation" should {
"propagate the TraceContext using bang" in new EchoActorFixture {
- val testTraceContext = TraceRecorder.withNewTraceContext("bang-reply") {
+ val testTraceContext = TraceContext.withContext(newContext("bang-reply")) {
ctxEchoActor ! "test"
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
expectMsg(testTraceContext)
}
"propagate the TraceContext using tell" in new EchoActorFixture {
- val testTraceContext = TraceRecorder.withNewTraceContext("tell-reply") {
+ val testTraceContext = TraceContext.withContext(newContext("tell-reply")) {
ctxEchoActor.tell("test", testActor)
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
expectMsg(testTraceContext)
@@ -51,37 +48,40 @@ class ActorCellInstrumentationSpec extends TestKit(ActorSystem("actor-cell-instr
"propagate the TraceContext using ask" in new EchoActorFixture {
implicit val timeout = Timeout(1 seconds)
- val testTraceContext = TraceRecorder.withNewTraceContext("ask-reply") {
+ val testTraceContext = TraceContext.withContext(newContext("ask-reply")) {
// The pipe pattern uses Futures internally, so the FutureTracing test should cover its underpinnings.
(ctxEchoActor ? "test") pipeTo (testActor)
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
expectMsg(testTraceContext)
}
- "propagate the TraceContext to actors behind a router" in new RoutedEchoActorFixture {
- val testTraceContext = TraceRecorder.withNewTraceContext("router-reply") {
- ctxEchoActor ! "test"
- TraceRecorder.currentContext
+ "propagate the TraceContext to actors behind a pool router" in new RoundRobinRouterFixture {
+ val testTraceContext = TraceContext.withContext(newContext("router-reply")) {
+ router ! "test"
+ TraceContext.currentContext
}
expectMsg(testTraceContext)
}
+
}
trait EchoActorFixture {
val ctxEchoActor = system.actorOf(Props[TraceContextEcho])
}
- trait RoutedEchoActorFixture extends EchoActorFixture {
- override val ctxEchoActor = system.actorOf(Props[TraceContextEcho].withRouter(RoundRobinRouter(nrOfInstances = 1)))
+ trait RoundRobinRouterFixture {
+ val router = system.actorOf(Props[TraceContextEcho].withRouter(
+ RoundRobinRouter(nrOfInstances = 5)), "pool-router")
}
+
}
class TraceContextEcho extends Actor {
def receive = {
- case msg: String ⇒ sender ! TraceRecorder.currentContext
+ case msg: String ⇒ sender ! TraceContext.currentContext
}
}
diff --git a/kamon-akka/src/test/scala/kamon/akka/instrumentation/ActorLoggingInstrumentationSpec.scala b/kamon-akka/src/test/scala/kamon/akka/instrumentation/ActorLoggingInstrumentationSpec.scala
new file mode 100644
index 00000000..143c816d
--- /dev/null
+++ b/kamon-akka/src/test/scala/kamon/akka/instrumentation/ActorLoggingInstrumentationSpec.scala
@@ -0,0 +1,74 @@
+/* ===================================================
+ * Copyright © 2013 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ========================================================== */
+package kamon.instrumentation.akka
+
+import akka.actor.{ Actor, ActorLogging, Props }
+import akka.event.Logging.LogEvent
+import com.typesafe.config.ConfigFactory
+import kamon.testkit.BaseKamonSpec
+import kamon.trace.TraceLocal.AvailableToMdc
+import kamon.trace.logging.MdcKeysSupport
+import kamon.trace.{ TraceContextAware, TraceLocal, TraceContext }
+import org.scalatest.Inspectors
+import org.slf4j.MDC
+
+class ActorLoggingInstrumentationSpec extends BaseKamonSpec("actor-logging-instrumentation-spec") with Inspectors with MdcKeysSupport {
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |akka {
+ | loggers = ["akka.event.slf4j.Slf4jLogger"]
+ |}
+ """.stripMargin)
+
+ "the ActorLogging instrumentation" should {
+ "attach the TraceContext (if available) to log events" in {
+ val loggerActor = system.actorOf(Props[LoggerActor])
+ system.eventStream.subscribe(testActor, classOf[LogEvent])
+
+ val testTraceContext = TraceContext.withContext(newContext("logging")) {
+ loggerActor ! "info"
+ TraceContext.currentContext
+ }
+
+ fishForMessage() {
+ case event: LogEvent if event.message.toString startsWith "TraceContext" ⇒
+ val ctxInEvent = event.asInstanceOf[TraceContextAware].traceContext
+ ctxInEvent === testTraceContext
+
+ case event: LogEvent ⇒ false
+ }
+ }
+
+ "allow retrieve a value from the MDC when was created a key of type AvailableToMdc" in {
+ val testString = "Hello World"
+ TraceContext.withContext(newContext("logging-with-mdc")) {
+ TraceLocal.store(AvailableToMdc("some-cool-key"))(testString)
+
+ withMdc {
+ MDC.get("other-key") shouldBe (null)
+ MDC.get("some-cool-key") should equal(testString)
+ }
+ }
+ }
+ }
+}
+
+class LoggerActor extends Actor with ActorLogging {
+ def receive = {
+ case "info" ⇒ log.info("TraceContext(name = {}, token = {})", TraceContext.currentContext.name, TraceContext.currentContext.token)
+ }
+}
diff --git a/kamon-core/src/test/scala/kamon/instrumentation/akka/ActorSystemMessageInstrumentationSpec.scala b/kamon-akka/src/test/scala/kamon/akka/instrumentation/ActorSystemMessageInstrumentationSpec.scala
index d79ccbe0..cf5f1b5b 100644
--- a/kamon-core/src/test/scala/kamon/instrumentation/akka/ActorSystemMessageInstrumentationSpec.scala
+++ b/kamon-akka/src/test/scala/kamon/akka/instrumentation/ActorSystemMessageInstrumentationSpec.scala
@@ -1,46 +1,70 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
package kamon.instrumentation.akka
import akka.actor.SupervisorStrategy.{ Escalate, Restart, Resume, Stop }
import akka.actor._
-import akka.testkit.{ ImplicitSender, TestKit }
-import kamon.trace.{ EmptyTraceContext, TraceRecorder }
+import akka.testkit.ImplicitSender
+import com.typesafe.config.ConfigFactory
+import kamon.testkit.BaseKamonSpec
+import kamon.trace.{ EmptyTraceContext, TraceContext }
import org.scalatest.WordSpecLike
import scala.concurrent.duration._
import scala.util.control.NonFatal
-class ActorSystemMessageInstrumentationSpec extends TestKit(ActorSystem("actor-system-message-instrumentation-spec"))
- with WordSpecLike with ImplicitSender {
+class ActorSystemMessageInstrumentationSpec extends BaseKamonSpec("actor-system-message-instrumentation-spec") with WordSpecLike {
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |akka {
+ | loglevel = OFF
+ |}
+ """.stripMargin)
- implicit val executionContext = system.dispatcher
+ implicit lazy val executionContext = system.dispatcher
"the system message passing instrumentation" should {
"keep the TraceContext while processing the Create message in top level actors" in {
- val testTraceContext = TraceRecorder.withNewTraceContext("creating-top-level-actor") {
+ val testTraceContext = TraceContext.withContext(newContext("creating-top-level-actor")) {
system.actorOf(Props(new Actor {
- testActor ! TraceRecorder.currentContext
+ testActor ! TraceContext.currentContext
def receive: Actor.Receive = { case any ⇒ }
}))
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
expectMsg(testTraceContext)
}
"keep the TraceContext while processing the Create message in non top level actors" in {
- val testTraceContext = TraceRecorder.withNewTraceContext("creating-non-top-level-actor") {
+ val testTraceContext = TraceContext.withContext(newContext("creating-non-top-level-actor")) {
system.actorOf(Props(new Actor {
def receive: Actor.Receive = {
case any ⇒
context.actorOf(Props(new Actor {
- testActor ! TraceRecorder.currentContext
+ testActor ! TraceContext.currentContext
def receive: Actor.Receive = { case any ⇒ }
}))
}
})) ! "any"
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
expectMsg(testTraceContext)
@@ -50,9 +74,9 @@ class ActorSystemMessageInstrumentationSpec extends TestKit(ActorSystem("actor-s
"the actor is resumed" in {
val supervisor = supervisorWithDirective(Resume)
- val testTraceContext = TraceRecorder.withNewTraceContext("fail-and-resume") {
+ val testTraceContext = TraceContext.withContext(newContext("fail-and-resume")) {
supervisor ! "fail"
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
expectMsg(testTraceContext) // From the parent executing the supervision strategy
@@ -65,9 +89,9 @@ class ActorSystemMessageInstrumentationSpec extends TestKit(ActorSystem("actor-s
"the actor is restarted" in {
val supervisor = supervisorWithDirective(Restart, sendPreRestart = true, sendPostRestart = true)
- val testTraceContext = TraceRecorder.withNewTraceContext("fail-and-restart") {
+ val testTraceContext = TraceContext.withContext(newContext("fail-and-restart")) {
supervisor ! "fail"
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
expectMsg(testTraceContext) // From the parent executing the supervision strategy
@@ -82,9 +106,9 @@ class ActorSystemMessageInstrumentationSpec extends TestKit(ActorSystem("actor-s
"the actor is stopped" in {
val supervisor = supervisorWithDirective(Stop, sendPostStop = true)
- val testTraceContext = TraceRecorder.withNewTraceContext("fail-and-stop") {
+ val testTraceContext = TraceContext.withContext(newContext("fail-and-stop")) {
supervisor ! "fail"
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
expectMsg(testTraceContext) // From the parent executing the supervision strategy
@@ -95,9 +119,9 @@ class ActorSystemMessageInstrumentationSpec extends TestKit(ActorSystem("actor-s
"the failure is escalated" in {
val supervisor = supervisorWithDirective(Escalate, sendPostStop = true)
- val testTraceContext = TraceRecorder.withNewTraceContext("fail-and-escalate") {
+ val testTraceContext = TraceContext.withContext(newContext("fail-and-escalate")) {
supervisor ! "fail"
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
expectMsg(testTraceContext) // From the parent executing the supervision strategy
@@ -115,7 +139,7 @@ class ActorSystemMessageInstrumentationSpec extends TestKit(ActorSystem("actor-s
val child = context.actorOf(Props(new Parent))
override def supervisorStrategy: SupervisorStrategy = OneForOneStrategy() {
- case NonFatal(throwable) ⇒ testActor ! TraceRecorder.currentContext; Stop
+ case NonFatal(throwable) ⇒ testActor ! TraceContext.currentContext; Stop
}
def receive = {
@@ -127,7 +151,7 @@ class ActorSystemMessageInstrumentationSpec extends TestKit(ActorSystem("actor-s
val child = context.actorOf(Props(new Child))
override def supervisorStrategy: SupervisorStrategy = OneForOneStrategy() {
- case NonFatal(throwable) ⇒ testActor ! TraceRecorder.currentContext; directive
+ case NonFatal(throwable) ⇒ testActor ! TraceContext.currentContext; directive
}
def receive: Actor.Receive = {
@@ -135,7 +159,7 @@ class ActorSystemMessageInstrumentationSpec extends TestKit(ActorSystem("actor-s
}
override def postStop(): Unit = {
- if (sendPostStop) testActor ! TraceRecorder.currentContext
+ if (sendPostStop) testActor ! TraceContext.currentContext
super.postStop()
}
}
@@ -143,26 +167,26 @@ class ActorSystemMessageInstrumentationSpec extends TestKit(ActorSystem("actor-s
class Child extends Actor {
def receive = {
case "fail" ⇒ throw new ArithmeticException("Division by zero.")
- case "context" ⇒ sender ! TraceRecorder.currentContext
+ case "context" ⇒ sender ! TraceContext.currentContext
}
override def preRestart(reason: Throwable, message: Option[Any]): Unit = {
- if (sendPreRestart) testActor ! TraceRecorder.currentContext
+ if (sendPreRestart) testActor ! TraceContext.currentContext
super.preRestart(reason, message)
}
override def postRestart(reason: Throwable): Unit = {
- if (sendPostRestart) testActor ! TraceRecorder.currentContext
+ if (sendPostRestart) testActor ! TraceContext.currentContext
super.postRestart(reason)
}
override def postStop(): Unit = {
- if (sendPostStop) testActor ! TraceRecorder.currentContext
+ if (sendPostStop) testActor ! TraceContext.currentContext
super.postStop()
}
override def preStart(): Unit = {
- if (sendPreStart) testActor ! TraceRecorder.currentContext
+ if (sendPreStart) testActor ! TraceContext.currentContext
super.preStart()
}
}
diff --git a/kamon-akka/src/test/scala/kamon/akka/instrumentation/AskPatternInstrumentationSpec.scala b/kamon-akka/src/test/scala/kamon/akka/instrumentation/AskPatternInstrumentationSpec.scala
new file mode 100644
index 00000000..d925fbf6
--- /dev/null
+++ b/kamon-akka/src/test/scala/kamon/akka/instrumentation/AskPatternInstrumentationSpec.scala
@@ -0,0 +1,137 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.instrumentation.akka
+
+import java.util.concurrent.atomic.AtomicInteger
+
+import akka.actor._
+import akka.event.Logging.Warning
+import akka.pattern.ask
+import akka.testkit.TestProbe
+import akka.util.Timeout
+import com.typesafe.config.ConfigFactory
+import kamon.Kamon
+import kamon.akka.Akka
+import kamon.testkit.BaseKamonSpec
+import kamon.trace.{ TraceContext, TraceContextAware }
+
+import scala.concurrent.duration._
+
+class AskPatternInstrumentationSpec extends BaseKamonSpec("ask-pattern-tracing-spec") {
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |akka {
+ | loglevel = OFF
+ |}
+ """.stripMargin)
+
+ implicit lazy val ec = system.dispatcher
+ implicit val askTimeout = Timeout(10 millis)
+
+ // TODO: Make this work with ActorSelections
+
+ "the AskPatternInstrumentation" when {
+ "configured in heavyweight mode" should {
+ "log a warning with a full stack trace and the TraceContext taken from the moment the ask was triggered for a actor" in new NoReplyFixture {
+ setAskPatternTimeoutWarningMode("heavyweight")
+
+ expectTimeoutWarning() {
+ TraceContext.withContext(newContext("ask-timeout-warning")) {
+ noReplyActorRef ? "hello"
+ TraceContext.currentContext
+ }
+ }
+ }
+ }
+
+ "configured in lightweight mode" should {
+ "log a warning with a short source location description and the TraceContext taken from the moment the ask was triggered for a actor" in new NoReplyFixture {
+ setAskPatternTimeoutWarningMode("lightweight")
+
+ expectTimeoutWarning(messageSizeLimit = Some(1)) {
+ TraceContext.withContext(newContext("ask-timeout-warning")) {
+ noReplyActorRef ? "hello"
+ TraceContext.currentContext
+ }
+ }
+ }
+ }
+
+ "configured in off mode" should {
+ "should not log any warning messages" in new NoReplyFixture {
+ setAskPatternTimeoutWarningMode("off")
+
+ expectTimeoutWarning(expectWarning = false) {
+ TraceContext.withContext(newContext("ask-timeout-warning")) {
+ noReplyActorRef ? "hello"
+ TraceContext.currentContext
+ }
+ }
+ }
+ }
+ }
+
+ def expectTimeoutWarning(messageSizeLimit: Option[Int] = None, expectWarning: Boolean = true)(thunk: ⇒ TraceContext): Unit = {
+ val listener = warningListener()
+ val testTraceContext = thunk
+
+ if (expectWarning) {
+ val warning = listener.fishForMessage() {
+ case Warning(_, _, msg) if msg.toString.startsWith("Timeout triggered for ask pattern registered at") ⇒ true
+ case others ⇒ false
+ }.asInstanceOf[Warning]
+
+ warning.asInstanceOf[TraceContextAware].traceContext should equal(testTraceContext)
+ messageSizeLimit.map { messageLimit ⇒
+ warning.message.toString.lines.size should be(messageLimit)
+ }
+ } else {
+ listener.expectNoMsg()
+ }
+ }
+
+ def warningListener(): TestProbe = {
+ val listener = TestProbe()
+ system.eventStream.subscribe(listener.ref, classOf[Warning])
+ listener
+ }
+
+ def setAskPatternTimeoutWarningMode(mode: String): Unit = {
+ val target = Kamon(Akka)
+ val field = target.getClass.getDeclaredField("askPatternTimeoutWarning")
+ field.setAccessible(true)
+ field.set(target, mode)
+ }
+
+ val fixtureCounter = new AtomicInteger(0)
+
+ trait NoReplyFixture {
+ def noReplyActorRef: ActorRef = system.actorOf(Props[NoReply], "no-reply-" + fixtureCounter.incrementAndGet())
+
+ def noReplyActorSelection: ActorSelection = {
+ val target = noReplyActorRef
+ system.actorSelection(target.path)
+ }
+ }
+}
+
+class NoReply extends Actor {
+ def receive = {
+ case any ⇒
+ }
+}
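
warningListener() above shows the standard pattern for asserting on published log events: subscribe a TestProbe to the event stream and fish for the expected message. Condensed:

    import akka.event.Logging.Warning
    import akka.testkit.TestProbe

    val probe = TestProbe()
    system.eventStream.subscribe(probe.ref, classOf[Warning])
    // ...trigger an ask that is allowed to time out...
    probe.fishForMessage() {
      case w: Warning ⇒ w.message.toString.startsWith("Timeout triggered for ask pattern")
      case _          ⇒ false
    }
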
diff --git a/kamon-core/src/main/java/kamon/util/Example.java b/kamon-core/src/main/java/kamon/util/Example.java
deleted file mode 100644
index a5031182..00000000
--- a/kamon-core/src/main/java/kamon/util/Example.java
+++ /dev/null
@@ -1,8 +0,0 @@
-package kamon.util;
-
-public class Example {
-
- public static void main(String args[]) {
-
- }
-}
diff --git a/kamon-core/src/main/java/kamon/util/GlobPathFilter.java b/kamon-core/src/main/java/kamon/util/GlobPathFilter.java
index a000e2a0..0c7b999c 100644
--- a/kamon-core/src/main/java/kamon/util/GlobPathFilter.java
+++ b/kamon-core/src/main/java/kamon/util/GlobPathFilter.java
@@ -1,6 +1,6 @@
/*
* =========================================================================================
- * Copyright 2013 the kamon project <http://kamon.io/>
+ * Copyright 2013-2014 the kamon project <http://kamon.io/>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
@@ -14,63 +14,62 @@
* =========================================================================================
*/
-// This file was copied from: https://github.com/jboss-modules/jboss-modules/blob/master/src/main/java/org/jboss/modules/filter/GlobPathFilter.java
package kamon.util;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
-* Default implementation of PathFilter. Uses glob based includes and excludes to determine whether to export.
-*
-* @author John E. Bailey
-* @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a>
-*/
-public final class GlobPathFilter {
+ * Default implementation of PathFilter. Uses glob based includes and excludes to determine whether to export.
+ *
+ * @author John E. Bailey
+ * @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a>
+ */
+public final class GlobPathFilter {
private static final Pattern GLOB_PATTERN = Pattern.compile("(\\*\\*?)|(\\?)|(\\\\.)|(/+)|([^*?]+)");
private final String glob;
private final Pattern pattern;
/**
-* Construct a new instance.
-*
-* @param glob the path glob to match
-*/
+ * Construct a new instance.
+ *
+ * @param glob the path glob to match
+ */
public GlobPathFilter(final String glob) {
pattern = getGlobPattern(glob);
this.glob = glob;
}
/**
-* Determine whether a path should be accepted.
-*
-* @param path the path to check
-* @return true if the path should be accepted, false if not
-*/
+ * Determine whether a path should be accepted.
+ *
+ * @param path the path to check
+ * @return true if the path should be accepted, false if not
+ */
public boolean accept(final String path) {
return pattern.matcher(path).matches();
}
/**
- * Get a regular expression pattern which accept any path names which match the given glob. The glob patterns
- * function similarly to {@code ant} file patterns. Valid metacharacters in the glob pattern include:
- * <ul>
- * <li><code>"\"</code> - escape the next character (treat it literally, even if it is itself a recognized metacharacter)</li>
- * <li><code>"?"</code> - match any non-slash character</li>
- * <li><code>"*"</code> - match zero or more non-slash characters</li>
- * <li><code>"**"</code> - match zero or more characters, including slashes</li>
- * <li><code>"/"</code> - match one or more slash characters. Consecutive {@code /} characters are collapsed down into one.</li>
- * </ul>
- * In addition, any glob pattern matches all subdirectories thereof. A glob pattern ending in {@code /} is equivalent
- * to a glob pattern ending in <code>/**</code> in that the named directory is not itself included in the glob.
- * <p/>
- * <b>See also:</b> <a href="http://ant.apache.org/manual/dirtasks.html#patterns">"Patterns" in the Ant Manual</a>
- *
- * @param glob the glob to match
- *
- * @return the pattern
- */
+ * Get a regular expression pattern which accept any path names which match the given glob. The glob patterns
+ * function similarly to {@code ant} file patterns. Valid metacharacters in the glob pattern include:
+ * <ul>
+ * <li><code>"\"</code> - escape the next character (treat it literally, even if it is itself a recognized metacharacter)</li>
+ * <li><code>"?"</code> - match any non-slash character</li>
+ * <li><code>"*"</code> - match zero or more non-slash characters</li>
+ * <li><code>"**"</code> - match zero or more characters, including slashes</li>
+ * <li><code>"/"</code> - match one or more slash characters. Consecutive {@code /} characters are collapsed down into one.</li>
+ * </ul>
+ * A glob pattern ending in {@code /} is equivalent to a glob pattern ending in <code>/**</code>, in that the
+ * named directory is not itself included in the glob.
+ * <p/>
+ * <b>See also:</b> <a href="http://ant.apache.org/manual/dirtasks.html#patterns">"Patterns" in the Ant Manual</a>
+ *
+ * @param glob the glob to match
+ *
+ * @return the pattern
+ */
private static Pattern getGlobPattern(final String glob) {
StringBuilder patternBuilder = new StringBuilder();
final Matcher m = GLOB_PATTERN.matcher(glob);
@@ -81,7 +80,7 @@ public final class GlobPathFilter {
if ((grp = m.group(1)) != null) {
// match a * or **
if (grp.length() == 2) {
- // it's a **
+ // it's a **
patternBuilder.append(".*");
} else {
// it's a *
@@ -105,32 +104,7 @@ public final class GlobPathFilter {
if (lastWasSlash) {
// ends in /, append **
patternBuilder.append(".*");
- } else {
- patternBuilder.append("(?:/.*)?");
}
return Pattern.compile(patternBuilder.toString());
}
-
- public int hashCode() {
- return glob.hashCode() + 13;
- }
-
- public boolean equals(final Object obj) {
- return obj instanceof GlobPathFilter && equals((GlobPathFilter) obj);
- }
-
- public boolean equals(final GlobPathFilter obj) {
- return obj != null && obj.pattern.equals(pattern);
- }
-
- public String toString() {
- final StringBuilder b = new StringBuilder();
- b.append("match ");
- if (glob != null) {
- b.append('"').append(glob).append('"');
- } else {
- b.append('/').append(pattern).append('/');
- }
- return b.toString();
- }
} \ No newline at end of file
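With the `(?:/.*)?` suffix and the equals/hashCode/toString helpers removed above, a glob now matches only what it spells out; this is also why the default trace filter in reference.conf below moves from "*" to "**". A minimal Scala sketch of the resulting behavior:

  // Sketch against the GlobPathFilter shown above; `*` stops at slashes, `**` does not.
  val single = new kamon.util.GlobPathFilter("user/*")
  single.accept("user/worker-1")  // true: `*` matches a run of non-slash characters
  single.accept("user/a/b")       // false: subdirectories no longer match implicitly
  val recursive = new kamon.util.GlobPathFilter("user/**")
  recursive.accept("user/a/b")    // true: `**` matches across slashes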
diff --git a/kamon-core/src/main/protobuf/TraceContextAwareWireFormats.proto b/kamon-core/src/main/protobuf/TraceContextAwareWireFormats.proto
deleted file mode 100644
index d4ee21b5..00000000
--- a/kamon-core/src/main/protobuf/TraceContextAwareWireFormats.proto
+++ /dev/null
@@ -1,31 +0,0 @@
-import "WireFormats.proto";
-
-option java_package = "akka.remote.instrumentation";
-option optimize_for = SPEED;
-
-
-/************************************************
- * Kamon-specific additions to the protocol
- ************************************************/
-
-message AckAndTraceContextAwareEnvelopeContainer {
- optional AcknowledgementInfo ack = 1;
- optional TraceContextAwareRemoteEnvelope envelope = 2;
-}
-
-message TraceContextAwareRemoteEnvelope {
- required ActorRefData recipient = 1;
- required SerializedMessage message = 2;
- optional ActorRefData sender = 4;
- optional fixed64 seq = 5;
-
- optional RemoteTraceContext traceContext = 15;
-}
-
-message RemoteTraceContext {
- required string traceName = 1;
- required string traceToken = 2;
- required bool isOpen = 3;
- required fixed64 startMilliTime = 4;
-}
-
diff --git a/kamon-core/src/main/protobuf/WireFormats.proto b/kamon-core/src/main/protobuf/WireFormats.proto
deleted file mode 100644
index 98a954cc..00000000
--- a/kamon-core/src/main/protobuf/WireFormats.proto
+++ /dev/null
@@ -1,132 +0,0 @@
-/**
- * Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
- */
-
-// Extracted from https://github.com/akka/akka/blob/master/akka-remote/src/main/protobuf/WireFormats.proto
-
-
-option java_package = "akka.remote";
-option optimize_for = SPEED;
-
-/******************************************
- * Remoting message formats
- ******************************************/
-
-
-message AckAndEnvelopeContainer {
- optional AcknowledgementInfo ack = 1;
- optional RemoteEnvelope envelope = 2;
-}
-
-/**
- * Defines a remote message.
- */
-message RemoteEnvelope {
- required ActorRefData recipient = 1;
- required SerializedMessage message = 2;
- optional ActorRefData sender = 4;
- optional fixed64 seq = 5;
-}
-
-message AcknowledgementInfo {
- required fixed64 cumulativeAck = 1;
- repeated fixed64 nacks = 2;
-}
-
-/**
- * Defines a remote ActorRef that "remembers" and uses its original Actor instance
- * on the original node.
- */
-message ActorRefData {
- required string path = 1;
-}
-
-/**
- * Defines a message.
- */
-message SerializedMessage {
- required bytes message = 1;
- required int32 serializerId = 2;
- optional bytes messageManifest = 3;
-}
-
-/**
- * Defines akka.remote.DaemonMsgCreate
- */
-message DaemonMsgCreateData {
- required PropsData props = 1;
- required DeployData deploy = 2;
- required string path = 3;
- required ActorRefData supervisor = 4;
-}
-
-/**
- * Serialization of akka.actor.Props
- */
-message PropsData {
- required DeployData deploy = 2;
- required string clazz = 3;
- repeated bytes args = 4;
- repeated string classes = 5;
-}
-
-/**
- * Serialization of akka.actor.Deploy
- */
-message DeployData {
- required string path = 1;
- optional bytes config = 2;
- optional bytes routerConfig = 3;
- optional bytes scope = 4;
- optional string dispatcher = 5;
-}
-
-
-/******************************************
- * Akka Protocol message formats
- ******************************************/
-
-/**
- * Message format of Akka Protocol.
- * Message contains either a payload or an instruction.
- */
-message AkkaProtocolMessage {
- optional bytes payload = 1;
- optional AkkaControlMessage instruction = 2;
-}
-
-/**
- * Defines some control messages for the remoting
- */
-message AkkaControlMessage {
- required CommandType commandType = 1;
- optional AkkaHandshakeInfo handshakeInfo = 2;
-}
-
-message AkkaHandshakeInfo {
- required AddressData origin = 1;
- required fixed64 uid = 2;
- optional string cookie = 3;
-
-}
-
-/**
- * Defines the type of the AkkaControlMessage command type
- */
-enum CommandType {
- ASSOCIATE = 1;
- DISASSOCIATE = 2;
- HEARTBEAT = 3;
- DISASSOCIATE_SHUTTING_DOWN = 4; // Remote system is going down and will not accepts new connections
- DISASSOCIATE_QUARANTINED = 5; // Remote system refused the association since the current system is quarantined
-}
-
-/**
- * Defines a remote address.
- */
-message AddressData {
- required string system = 1;
- required string hostname = 2;
- required uint32 port = 3;
- optional string protocol = 4;
-}
diff --git a/kamon-core/src/main/resources/META-INF/aop.xml b/kamon-core/src/main/resources/META-INF/aop.xml
index 3a029ace..2ffb8b09 100644
--- a/kamon-core/src/main/resources/META-INF/aop.xml
+++ b/kamon-core/src/main/resources/META-INF/aop.xml
@@ -2,41 +2,14 @@
<aspectj>
<aspects>
- <!-- Disable AspectJ Weaver not present error -->
- <aspect name="kamon.instrumentation.AspectJWeaverMissingWarning"/>
- <!-- Actors -->
- <aspect name="akka.instrumentation.TraceContextIntoRepointableActorRefMixin"/>
- <aspect name="akka.instrumentation.TraceContextIntoSystemMessageMixin"/>
- <aspect name="akka.instrumentation.ActorSystemMessageInstrumentation"/>
- <aspect name="akka.instrumentation.TraceContextIntoEnvelopeMixin"/>
- <aspect name="akka.instrumentation.ActorCellMetricsIntoActorCellMixin"/>
- <aspect name="akka.instrumentation.ActorCellInstrumentation"/>
- <aspect name="akka.instrumentation.ActorLoggingInstrumentation"/>
+ <!-- Notify that AspectJ is present -->
+ <aspect name="kamon.supervisor.AspectJPresent"/>
- <!-- Dispatchers -->
- <aspect name="akka.instrumentation.DispatcherInstrumentation"/>
- <aspect name="akka.instrumentation.DispatcherMetricCollectionInfoIntoDispatcherMixin"/>
-
- <!-- Futures -->
- <aspect name="kamon.instrumentation.scala.FutureInstrumentation"/>
- <aspect name="kamon.instrumentation.scalaz.FutureInstrumentation"/>
-
- <!-- Patterns -->
- <aspect name="akka.instrumentation.AskPatternInstrumentation"/>
</aspects>
<weaver>
- <include within="scala.concurrent..*"/>
- <include within="scalaz.concurrent..*"/>
- <include within="akka..*"/>
- <include within="spray..*"/>
<include within="kamon..*"/>
-
- <!-- For some weird reason ByteString produces a java.lang.VerifyError after going through the weaver. -->
- <exclude within="akka.util.ByteString"/>
- <!-- Exclude CallingThreadDispatcher, is only for test purposes -->
- <exclude within="akka.testkit.CallingThreadDispatcher"/>
</weaver>
</aspectj> \ No newline at end of file
diff --git a/kamon-core/src/main/resources/reference.conf b/kamon-core/src/main/resources/reference.conf
index 12e21bd7..a648c01a 100644
--- a/kamon-core/src/main/resources/reference.conf
+++ b/kamon-core/src/main/resources/reference.conf
@@ -3,18 +3,10 @@
# ================================== #
kamon {
-
- # Default dispatcher for all Kamon components, unless a more specific one is configured.
- default-dispatcher = "akka.actor.default-dispatcher"
-
- metrics {
+ metric {
# Time interval for collecting all metrics and send the snapshots to all subscribed actors.
- tick-interval = 1 second
-
- # Time interval for recording values on all registered gauges.
- gauge-recording-interval = 100 milliseconds
-
+ tick-interval = 10 seconds
# Default size for the LongBuffer that gets allocated for metrics collection and merge. The
# value should correspond to the highest number of different buckets with values that might
@@ -31,105 +23,141 @@ kamon {
# it might be ok for you to turn this error off.
disable-aspectj-weaver-missing-error = false
+ # Specify whether entities that do not match any include/exclude filter should be tracked.
+ track-unmatched-entities = yes
- dispatchers {
-
- # Dispatcher for periodical gauge value recordings.
- gauge-recordings = ${kamon.default-dispatcher}
-
- # Dispatcher for subscriptions and metrics collection actors.
- metric-subscriptions = ${kamon.default-dispatcher}
- }
-
-
- filters = [
- {
- actor {
- includes = []
- excludes = [ "system/*", "user/IO-*" ]
- }
- },
- {
- router {
- includes = []
- excludes = [ "system/*", "user/IO-*" ]
- }
- },
- {
- trace {
- includes = [ "*" ]
- excludes = []
- }
- },
- {
- dispatcher {
- includes = [ "default-dispatcher" ]
- excludes = []
- }
+ filters {
+ trace {
+ includes = [ "**" ]
+ excludes = [ ]
}
- ]
+ }
- precision {
- default-histogram-precision {
+ # Default instrument settings for histograms, min-max counters and gauges. The actual settings used when
+ # creating an instrument are determined by merging the default settings, code settings and specific instrument
+ # settings using the following priorities (top wins):
+ #
+ # - any setting in `kamon.metric.instrument-settings` for the given category/instrument.
+ # - code settings provided when creating the instrument.
+ # - `default-instrument-settings`.
+ #
+ default-instrument-settings {
+ histogram {
+ precision = normal
+ lowest-discernible-value = 1
highest-trackable-value = 3600000000000
- significant-value-digits = 2
}
- default-min-max-counter-precision {
- refresh-interval = 100 milliseconds
+ min-max-counter {
+ precision = normal
+ lowest-discernible-value = 1
highest-trackable-value = 999999999
- significant-value-digits = 2
+ refresh-interval = 100 milliseconds
}
- default-gauge-precision {
+ gauge {
+ precision = normal
+ lowest-discernible-value = 1
+ highest-trackable-value = 3600000000000
refresh-interval = 100 milliseconds
- highest-trackable-value = 999999999
- significant-value-digits = 2
}
+ }
- actor {
- processing-time = ${kamon.metrics.precision.default-histogram-precision}
- time-in-mailbox = ${kamon.metrics.precision.default-histogram-precision}
- mailbox-size = ${kamon.metrics.precision.default-min-max-counter-precision}
- }
+ # Custom configurations for category instruments. The settings provided in this section will override the default
+ # and code instrument settings as explained in the `default-instrument-settings` key. There is no need to provide
+ # full instrument settings in this section, only the settings that should be overridden must be included. Example:
+ # if you wish to change the precision and lowest discernible value of the `elapsed-time` instrument for the `trace`
+ # category, you should include the following configuration in your application.conf file:
+ #
+ # kamon.metric.instrument-settings.trace {
+ # elapsed-time {
+ # precision = fine
+ # lowest-discernible-value = 1000
+ # }
+ # }
+ #
+ # In this example, the value for the `highest-trackable-value` setting will be either the code setting or the default
+ # setting, depending on how the `elapsed-time` metric is created.
+ instrument-settings {
- router {
- processing-time = ${kamon.metrics.precision.default-histogram-precision}
- time-in-mailbox = ${kamon.metrics.precision.default-histogram-precision}
- }
+ }
+ }
- trace {
- elapsed-time = ${kamon.metrics.precision.default-histogram-precision}
- segment = ${kamon.metrics.precision.default-histogram-precision}
- }
- dispatcher {
- maximum-pool-size {
- highest-trackable-value = 999999999
- significant-value-digits = 2
- }
- running-thread-count {
- highest-trackable-value = 999999999
- significant-value-digits = 2
- }
- queued-task-count {
- highest-trackable-value = 999999999
- significant-value-digits = 2
- }
- pool-size {
- highest-trackable-value = 999999999
- significant-value-digits = 2
- }
+ trace {
+
+ # Level of detail used when recording trace information. The possible values are:
+ # - metrics-only: metrics for all included traces and all segments are recorded, but no Trace messages will be sent
+ # to the subscribers of trace data.
+ # - simple-trace: metrics for all included traces and all segments are recorded and, additionally, a Trace message
+ # containing the trace and segment details and metadata is sent to the subscribers.
+ level-of-detail = metrics-only
+
+ # Sampling strategy to apply when the tracing level is set to `simple-trace`. The options are: all, random, ordered
+ # and threshold. The details of each sampler are below.
+ sampling = random
+
+ # Uses a ThreadLocalRandom to generate numbers between 1 and 100; if the random number is less than or equal to
+ # .chance, then tracing information will be gathered and reported for the current trace.
+ random-sampler {
+ chance = 10
+ }
+
+ # Uses an AtomicLong to ensure that tracing information is gathered and reported once for every .sample-interval
+ # requests.
+ ordered-sampler {
+ # must be power of two
+ sample-interval = 8
+ }
+
+ # Gathers tracing information for all traces but only reports those whose elapsed-time is equal to or greater
+ # than the .minimum-elapsed-time setting.
+ threshold-sampler {
+ minimum-elapsed-time = 1 second
+ }
+
+ incubator {
+ # Minimum time a trace stays in the incubator before checking whether it should leave it. No checks are made
+ # until this period has passed.
+ min-incubation-time = 5 seconds
+
+ # Time to wait between incubation checks. After min-incubation-time, a trace is checked at this interval and, if
+ # it shouldn't be incubated anymore, its TraceInfo is collected and reported.
+ check-interval = 1 second
+
+ # Max amount of time that a trace can be in the incubator. If this time is reached for a given trace then it will
+ # be reported with whatever information is available at the moment, logging a warning for each segment that remains
+ # open after this point.
+ max-incubation-time = 20 seconds
+ }
+ }
+
+
+ # All settings included under the internal-config key will be used to replace the akka.* and spray.* settings. This
+ # avoids applying custom settings that only make sense for the user application to the internal actor system and
+ # Spray facilities used by Kamon.
+ internal-config {
+
+ akka.actor.default-dispatcher {
+ fork-join-executor {
+ parallelism-min = 2
+ parallelism-factor = 2.0
+ parallelism-max = 10
}
}
+
+ spray {
+
+ }
}
- trace {
+ # Controls whether the AspectJ Weaver missing warning should be displayed if any Kamon module requiring AspectJ is
+ # found in the classpath but the application is started without the AspectJ Weaver.
+ show-aspectj-missing-warning = yes
- # If ask-pattern-tracing is enabled, a WARN level log message will be generated if a future generated by the `ask`
- # pattern fails with a `AskTimeoutException` and the log message will contain a stack trace captured at the moment
- # the future was created.
- ask-pattern-tracing = off
+ modules {
+ # Just a placeholder to ensure that the key is always available. Non-core Kamon modules should provide their
+ # settings in a module-info section.
}
} \ No newline at end of file
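The trace keys above can be overridden from application code through the new explicit start-up entry point (added in Kamon.scala below). A minimal sketch using only keys introduced in this file:

  import com.typesafe.config.ConfigFactory

  // Sketch: enable simple-trace with threshold sampling, falling back to the defaults above.
  val config = ConfigFactory.parseString(
    """
      |kamon.trace {
      |  level-of-detail = simple-trace
      |  sampling = threshold
      |  threshold-sampler.minimum-elapsed-time = 500 milliseconds
      |}
    """.stripMargin).withFallback(ConfigFactory.load())

  kamon.Kamon.start(config)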
diff --git a/kamon-core/src/main/scala/kamon/Kamon.scala b/kamon-core/src/main/scala/kamon/Kamon.scala
index dfebd3a5..ab9ce05e 100644
--- a/kamon-core/src/main/scala/kamon/Kamon.scala
+++ b/kamon-core/src/main/scala/kamon/Kamon.scala
@@ -14,10 +14,72 @@
*/
package kamon
-import akka.actor._
+import _root_.akka.actor
+import _root_.akka.actor._
+import com.typesafe.config.{ ConfigFactory, Config }
+import kamon.metric._
+import kamon.trace.{ TracerExtensionImpl, TracerExtension }
object Kamon {
- trait Extension extends akka.actor.Extension
- def apply[T <: Extension](key: ExtensionId[T])(implicit system: ActorSystem): T = key(system)
+ trait Extension extends actor.Extension
+
+ private case class KamonCoreComponents(
+ metrics: MetricsExtension,
+ tracer: TracerExtension,
+ userMetrics: UserMetricsExtension)
+
+ @volatile private var _system: ActorSystem = _
+ @volatile private var _coreComponents: Option[KamonCoreComponents] = None
+
+ def start(config: Config): Unit = synchronized {
+ def resolveInternalConfig: Config = {
+ val internalConfig = config.getConfig("kamon.internal-config")
+
+ config
+ .withoutPath("akka")
+ .withoutPath("spray")
+ .withFallback(internalConfig)
+ }
+
+ if (_coreComponents.isEmpty) {
+ val metrics = MetricsExtensionImpl(config)
+ val simpleMetrics = UserMetricsExtensionImpl(metrics)
+ val tracer = TracerExtensionImpl(metrics, config)
+
+ _coreComponents = Some(KamonCoreComponents(metrics, tracer, simpleMetrics))
+ _system = ActorSystem("kamon", resolveInternalConfig)
+
+ metrics.start(_system)
+ tracer.start(_system)
+
+ } else sys.error("Kamon has already been started.")
+ }
+
+ def start(): Unit =
+ start(ConfigFactory.load)
+
+ def metrics: MetricsExtension =
+ ifStarted(_.metrics)
+
+ def tracer: TracerExtension =
+ ifStarted(_.tracer)
+
+ def userMetrics: UserMetricsExtension =
+ ifStarted(_.userMetrics)
+
+ def apply[T <: Kamon.Extension](key: ExtensionId[T]): T =
+ ifStarted { _ ⇒
+ if (_system ne null)
+ key(_system)
+ else
+ sys.error("Cannot retrieve extensions while Kamon is being initialized.")
+ }
+
+ def extension[T <: Kamon.Extension](key: ExtensionId[T]): T =
+ apply(key)
+
+ private def ifStarted[T](thunk: KamonCoreComponents ⇒ T): T =
+ _coreComponents.map(thunk(_)) getOrElse (sys.error("Kamon has not been started yet."))
+
}
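The object above trades the old per-ActorSystem extension lookup for an explicit lifecycle: start once, then use the accessors. A minimal sketch, with names exactly as defined above:

  kamon.Kamon.start()             // wires metrics, tracer and the internal "kamon" ActorSystem
  val tracer = kamon.Kamon.tracer // accessors fail with "Kamon has not been started yet." before start()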
diff --git a/kamon-core/src/main/scala/kamon/http/HttpServerMetrics.scala b/kamon-core/src/main/scala/kamon/http/HttpServerMetrics.scala
index 0dd189f6..553d59ed 100644
--- a/kamon-core/src/main/scala/kamon/http/HttpServerMetrics.scala
+++ b/kamon-core/src/main/scala/kamon/http/HttpServerMetrics.scala
@@ -1,99 +1,41 @@
-package kamon.http
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric.instrument.Counter
-import kamon.metric._
-
-import scala.collection.concurrent.TrieMap
-
-object HttpServerMetrics extends MetricGroupIdentity {
- import Metrics.AtomicGetOrElseUpdateForTriemap
-
- val name: String = "http-server-metrics-recorder"
- val category = new MetricGroupCategory {
- val name: String = "http-server"
- }
-
- type TraceName = String
- type StatusCode = String
-
- case class CountPerStatusCode(statusCode: String) extends MetricIdentity {
- def name: String = statusCode
- }
-
- case class TraceCountPerStatus(traceName: TraceName, statusCode: StatusCode) extends MetricIdentity {
- def name: String = traceName + "_" + statusCode
- }
-
- class HttpServerMetricsRecorder extends MetricGroupRecorder {
-
- private val counters = TrieMap[StatusCode, Counter]()
- private val countersPerTrace = TrieMap[TraceName, TrieMap[StatusCode, Counter]]()
-
- def recordResponse(statusCode: StatusCode): Unit = recordResponse(statusCode, 1L)
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
- def recordResponse(statusCode: StatusCode, count: Long): Unit =
- counters.atomicGetOrElseUpdate(statusCode, Counter()).increment(count)
-
- def recordResponse(traceName: TraceName, statusCode: StatusCode): Unit = recordResponse(traceName, statusCode, 1L)
-
- def recordResponse(traceName: TraceName, statusCode: StatusCode, count: Long): Unit = {
- recordResponse(statusCode, count)
- countersPerTrace.atomicGetOrElseUpdate(traceName, TrieMap()).atomicGetOrElseUpdate(statusCode, Counter()).increment(count)
- }
-
- def collect(context: CollectionContext): HttpServerMetricsSnapshot = {
- val countsPerStatusCode = counters.map {
- case (statusCode, counter) ⇒ (statusCode, counter.collect(context))
- }.toMap
-
- val countsPerTraceAndStatus = countersPerTrace.map {
- case (traceName, countsPerStatus) ⇒
- (traceName, countsPerStatus.map { case (statusCode, counter) ⇒ (statusCode, counter.collect(context)) }.toMap)
- }.toMap
-
- HttpServerMetricsSnapshot(countsPerStatusCode, countsPerTraceAndStatus)
- }
-
- def cleanup: Unit = {}
- }
+package kamon.http
- case class HttpServerMetricsSnapshot(countsPerStatusCode: Map[StatusCode, Counter.Snapshot],
- countsPerTraceAndStatusCode: Map[TraceName, Map[StatusCode, Counter.Snapshot]]) extends MetricGroupSnapshot {
+import kamon.metric.{ EntityRecorderFactory, GenericEntityRecorder }
+import kamon.metric.instrument.InstrumentFactory
- type GroupSnapshotType = HttpServerMetricsSnapshot
+/**
+ * Counts HTTP response status codes into per-status-code and per-trace-and-status counters. If recording an HTTP
+ * response with status 500 for the trace "GetUser", the counter with name "500" as well as the counter with name
+ * "GetUser_500" will be incremented.
+ */
+class HttpServerMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
- def merge(that: HttpServerMetricsSnapshot, context: CollectionContext): HttpServerMetricsSnapshot = {
- val combinedCountsPerStatus = combineMaps(countsPerStatusCode, that.countsPerStatusCode)((l, r) ⇒ l.merge(r, context))
- val combinedCountsPerTraceAndStatus = combineMaps(countsPerTraceAndStatusCode, that.countsPerTraceAndStatusCode) {
- (leftCounts, rightCounts) ⇒ combineMaps(leftCounts, rightCounts)((l, r) ⇒ l.merge(r, context))
- }
- HttpServerMetricsSnapshot(combinedCountsPerStatus, combinedCountsPerTraceAndStatus)
- }
+ def recordResponse(statusCode: String): Unit =
+ counter(statusCode).increment()
- def metrics: Map[MetricIdentity, MetricSnapshot] = {
- countsPerStatusCode.map {
- case (statusCode, count) ⇒ (CountPerStatusCode(statusCode), count)
- } ++ {
- for (
- (traceName, countsPerStatus) ← countsPerTraceAndStatusCode;
- (statusCode, count) ← countsPerStatus
- ) yield (TraceCountPerStatus(traceName, statusCode), count)
- }
- }
+ def recordResponse(traceName: String, statusCode: String): Unit = {
+ recordResponse(statusCode)
+ counter(traceName + "_" + statusCode).increment()
}
-
- val Factory = HttpServerMetricGroupFactory
}
-case object HttpServerMetricGroupFactory extends MetricGroupFactory {
-
- import HttpServerMetrics._
-
- type GroupRecorder = HttpServerMetricsRecorder
-
- def create(config: Config, system: ActorSystem): HttpServerMetricsRecorder =
- new HttpServerMetricsRecorder()
-
-} \ No newline at end of file
+object HttpServerMetrics extends EntityRecorderFactory[HttpServerMetrics] {
+ def category: String = "http-server"
+ def createRecorder(instrumentFactory: InstrumentFactory): HttpServerMetrics = new HttpServerMetrics(instrumentFactory)
+}
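A sketch of the rewritten recorder in isolation; obtaining an InstrumentFactory is outside this file, so `instrumentFactory` below is assumed to be provided by the metrics extension:

  val serverMetrics = new HttpServerMetrics(instrumentFactory) // assumed factory instance
  serverMetrics.recordResponse("GetUser", "500") // increments both the "500" and "GetUser_500" counters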
diff --git a/kamon-core/src/main/scala/kamon/instrumentation/AspectJWeaverMissingWarning.scala b/kamon-core/src/main/scala/kamon/instrumentation/AspectJWeaverMissingWarning.scala
index 5ca4481e..4dc5ff41 100644
--- a/kamon-core/src/main/scala/kamon/instrumentation/AspectJWeaverMissingWarning.scala
+++ b/kamon-core/src/main/scala/kamon/instrumentation/AspectJWeaverMissingWarning.scala
@@ -1,3 +1,19 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
package kamon.instrumentation
import _root_.akka.event.EventStream
diff --git a/kamon-core/src/main/scala/kamon/instrumentation/akka/ActorCellInstrumentation.scala b/kamon-core/src/main/scala/kamon/instrumentation/akka/ActorCellInstrumentation.scala
deleted file mode 100644
index 366f446d..00000000
--- a/kamon-core/src/main/scala/kamon/instrumentation/akka/ActorCellInstrumentation.scala
+++ /dev/null
@@ -1,176 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package akka.instrumentation
-
-import akka.actor._
-import akka.dispatch.{ Envelope, MessageDispatcher }
-import akka.routing.RoutedActorCell
-import kamon.Kamon
-import kamon.metric.ActorMetrics.ActorMetricsRecorder
-import kamon.metric.RouterMetrics.RouterMetricsRecorder
-import kamon.metric.{ ActorMetrics, Metrics, RouterMetrics }
-import kamon.trace._
-import org.aspectj.lang.ProceedingJoinPoint
-import org.aspectj.lang.annotation._
-
-@Aspect
-class ActorCellInstrumentation {
-
- import ActorCellInstrumentation.PimpedActorCellMetrics
-
- @Pointcut("execution(akka.actor.ActorCell.new(..)) && this(cell) && args(system, ref, props, dispatcher, parent)")
- def actorCellCreation(cell: ActorCell, system: ActorSystem, ref: ActorRef, props: Props, dispatcher: MessageDispatcher, parent: ActorRef): Unit = {}
-
- @After("actorCellCreation(cell, system, ref, props, dispatcher, parent)")
- def afterCreation(cell: ActorCell, system: ActorSystem, ref: ActorRef, props: Props, dispatcher: MessageDispatcher, parent: ActorRef): Unit = {
-
- val metricsExtension = Kamon(Metrics)(system)
- val metricIdentity = ActorMetrics(ref.path.elements.mkString("/"))
- val cellWithMetrics = cell.asInstanceOf[ActorCellMetrics]
-
- cellWithMetrics.actorMetricIdentity = metricIdentity
- cellWithMetrics.actorMetricsRecorder = metricsExtension.register(metricIdentity, ActorMetrics.Factory)
-
- cellWithMetrics.onRoutedActorCell { routedActorCell ⇒
- val routerMetricIdentity = RouterMetrics(s"${routedActorCell.asInstanceOf[RoutedActorCell].self.path.elements.mkString("/")}")
- routedActorCell.routerMetricIdentity = routerMetricIdentity
- routedActorCell.routerMetricsRecorder = metricsExtension.register(routerMetricIdentity, RouterMetrics.Factory)
- }
- }
-
- @Pointcut("(execution(* akka.actor.ActorCell.invoke(*)) || execution(* akka.routing.RoutedActorCell.sendMessage(*))) && this(cell) && args(envelope)")
- def invokingActorBehaviourAtActorCell(cell: ActorCell, envelope: Envelope) = {}
-
- @Around("invokingActorBehaviourAtActorCell(cell, envelope)")
- def aroundBehaviourInvoke(pjp: ProceedingJoinPoint, cell: ActorCell, envelope: Envelope): Any = {
- val cellWithMetrics = cell.asInstanceOf[ActorCellMetrics]
- val timestampBeforeProcessing = System.nanoTime()
- val contextAndTimestamp = envelope.asInstanceOf[TimestampedTraceContextAware]
-
- try {
- TraceRecorder.withInlineTraceContextReplacement(contextAndTimestamp.traceContext) {
- pjp.proceed()
- }
- } finally {
- cellWithMetrics.actorMetricsRecorder.map {
- am ⇒
- val processingTime = System.nanoTime() - timestampBeforeProcessing
- val timeInMailbox = timestampBeforeProcessing - contextAndTimestamp.captureNanoTime
-
- am.processingTime.record(processingTime)
- am.timeInMailbox.record(timeInMailbox)
- am.mailboxSize.decrement()
-
- (processingTime, timeInMailbox)
- } map {
- case (processingTime, timeInMailbox) ⇒
- cellWithMetrics.onRoutedActorCell { routedActorCell ⇒
- routedActorCell.routerMetricsRecorder.map {
- rm ⇒
- rm.processingTime.record(processingTime)
- rm.timeInMailbox.record(timeInMailbox)
- }
- }
- }
- }
- }
-
- @Pointcut("execution(* akka.actor.ActorCell.sendMessage(*)) && this(cell)")
- def sendingMessageToActorCell(cell: ActorCell): Unit = {}
-
- @After("sendingMessageToActorCell(cell)")
- def afterSendMessageToActorCell(cell: ActorCell): Unit = {
- val cellWithMetrics = cell.asInstanceOf[ActorCellMetrics]
- cellWithMetrics.actorMetricsRecorder.map(am ⇒ am.mailboxSize.increment())
- }
-
- @Pointcut("execution(* akka.actor.ActorCell.stop()) && this(cell)")
- def actorStop(cell: ActorCell): Unit = {}
-
- @After("actorStop(cell)")
- def afterStop(cell: ActorCell): Unit = {
- val cellWithMetrics = cell.asInstanceOf[ActorCellMetrics]
-
- cellWithMetrics.actorMetricsRecorder.map { p ⇒
- Kamon(Metrics)(cell.system).unregister(cellWithMetrics.actorMetricIdentity)
- }
-
- cellWithMetrics.onRoutedActorCell { routedActorCell ⇒
- routedActorCell.routerMetricsRecorder.map { rm ⇒
- Kamon(Metrics)(cell.system).unregister(cellWithMetrics.routerMetricIdentity)
- }
- }
- }
-
- @Pointcut("execution(* akka.actor.ActorCell.handleInvokeFailure(..)) && this(cell)")
- def actorInvokeFailure(cell: ActorCell): Unit = {}
-
- @Before("actorInvokeFailure(cell)")
- def beforeInvokeFailure(cell: ActorCell): Unit = {
- val cellWithMetrics = cell.asInstanceOf[ActorCellMetrics]
-
- cellWithMetrics.actorMetricsRecorder.map {
- am ⇒ am.errors.increment()
- }
-
- cellWithMetrics.onRoutedActorCell { routedActorCell ⇒
- routedActorCell.routerMetricsRecorder.map {
- rm ⇒ rm.errors.increment()
- }
- }
- }
-
-}
-
-trait ActorCellMetrics {
- var actorMetricIdentity: ActorMetrics = _
- var routerMetricIdentity: RouterMetrics = _
- var actorMetricsRecorder: Option[ActorMetricsRecorder] = _
- var routerMetricsRecorder: Option[RouterMetricsRecorder] = _
-}
-
-@Aspect
-class ActorCellMetricsIntoActorCellMixin {
-
- @DeclareMixin("akka.actor.ActorCell")
- def mixinActorCellMetricsToActorCell: ActorCellMetrics = new ActorCellMetrics {}
-}
-
-@Aspect
-class TraceContextIntoEnvelopeMixin {
-
- @DeclareMixin("akka.dispatch.Envelope")
- def mixinTraceContextAwareToEnvelope: TimestampedTraceContextAware = TimestampedTraceContextAware.default
-
- @Pointcut("execution(akka.dispatch.Envelope.new(..)) && this(ctx)")
- def envelopeCreation(ctx: TimestampedTraceContextAware): Unit = {}
-
- @After("envelopeCreation(ctx)")
- def afterEnvelopeCreation(ctx: TimestampedTraceContextAware): Unit = {
- // Necessary to force the initialization of ContextAware at the moment of creation.
- ctx.traceContext
- }
-}
-
-object ActorCellInstrumentation {
- implicit class PimpedActorCellMetrics(cell: ActorCellMetrics) {
- def onRoutedActorCell(block: ActorCellMetrics ⇒ Unit): Unit = {
- if (cell.isInstanceOf[RoutedActorCell])
- block(cell)
- }
- }
-} \ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/instrumentation/akka/AskPatternInstrumentation.scala b/kamon-core/src/main/scala/kamon/instrumentation/akka/AskPatternInstrumentation.scala
deleted file mode 100644
index 5e8175fa..00000000
--- a/kamon-core/src/main/scala/kamon/instrumentation/akka/AskPatternInstrumentation.scala
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package akka.instrumentation
-
-import akka.actor.ActorRefProvider
-import akka.event.Logging.Warning
-import akka.pattern.{ AskTimeoutException, PromiseActorRef }
-import kamon.Kamon
-import kamon.trace.Trace
-import org.aspectj.lang.annotation.{ AfterReturning, Aspect, Pointcut }
-
-import scala.compat.Platform.EOL
-
-@Aspect
-class AskPatternInstrumentation {
-
- class StackTraceCaptureException extends Throwable
-
- @Pointcut(value = "execution(* akka.pattern.PromiseActorRef$.apply(..)) && args(provider, *)", argNames = "provider")
- def promiseActorRefApply(provider: ActorRefProvider): Unit = {}
-
- @AfterReturning(pointcut = "promiseActorRefApply(provider)", returning = "promiseActor")
- def hookAskTimeoutWarning(provider: ActorRefProvider, promiseActor: PromiseActorRef): Unit = {
- val system = promiseActor.provider.guardian.underlying.system
- val traceExtension = Kamon(Trace)(system)
-
- if (traceExtension.enableAskPatternTracing) {
- val future = promiseActor.result.future
- implicit val ec = system.dispatcher
- val stack = new StackTraceCaptureException
-
- future onFailure {
- case timeout: AskTimeoutException ⇒
- val stackString = stack.getStackTrace.drop(3).mkString("", EOL, EOL)
-
- system.eventStream.publish(Warning("AskPatternTracing", classOf[AskPatternInstrumentation],
- "Timeout triggered for ask pattern registered at: " + stackString))
- }
- }
- }
-}
diff --git a/kamon-core/src/main/scala/kamon/instrumentation/akka/DispatcherInstrumentation.scala b/kamon-core/src/main/scala/kamon/instrumentation/akka/DispatcherInstrumentation.scala
deleted file mode 100644
index 8b3af3d6..00000000
--- a/kamon-core/src/main/scala/kamon/instrumentation/akka/DispatcherInstrumentation.scala
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package akka.instrumentation
-
-import java.lang.reflect.Method
-import java.util.concurrent.ThreadPoolExecutor
-
-import akka.actor.{ ActorSystemImpl, Cancellable }
-import akka.dispatch.{ Dispatcher, Dispatchers, ExecutorServiceDelegate, MessageDispatcher }
-import akka.instrumentation.DispatcherMetricsCollector.DispatcherMetricsMeasurement
-import kamon.Kamon
-import kamon.metric.DispatcherMetrics.DispatcherMetricRecorder
-import kamon.metric.{ DispatcherMetrics, Metrics }
-import org.aspectj.lang.annotation._
-
-import scala.concurrent.forkjoin.ForkJoinPool
-
-@Aspect
-class DispatcherInstrumentation {
-
- @Pointcut("execution(akka.dispatch.Dispatchers.new(..)) && this(dispatchers) && cflow(execution(akka.actor.ActorSystemImpl.new(..)) && this(system))")
- def onActorSystemStartup(dispatchers: Dispatchers, system: ActorSystemImpl) = {}
-
- @Before("onActorSystemStartup(dispatchers, system)")
- def beforeActorSystemStartup(dispatchers: Dispatchers, system: ActorSystemImpl): Unit = {
- val currentDispatchers = dispatchers.asInstanceOf[DispatchersWithActorSystem]
- currentDispatchers.actorSystem = system
- }
-
- @Pointcut("execution(* akka.dispatch.Dispatchers.lookup(..)) && this(dispatchers)")
- def onDispatchersLookup(dispatchers: Dispatchers) = {}
-
- @AfterReturning(pointcut = "onDispatchersLookup(dispatchers)", returning = "dispatcher")
- def afterReturningLookup(dispatchers: Dispatchers, dispatcher: Dispatcher): Unit = {
- val dispatchersWithActorSystem = dispatchers.asInstanceOf[DispatchersWithActorSystem]
- val dispatcherWithMetrics = dispatcher.asInstanceOf[DispatcherMetricCollectionInfo]
-
- dispatcherWithMetrics.actorSystem = dispatchersWithActorSystem.actorSystem
- }
-
- @Pointcut("call(* akka.dispatch.ExecutorServiceFactory.createExecutorService(..))")
- def onCreateExecutorService(): Unit = {}
-
- @Pointcut("cflow((execution(* akka.dispatch.MessageDispatcher.registerForExecution(..)) || execution(* akka.dispatch.MessageDispatcher.executeTask(..))) && this(dispatcher))")
- def onCflowMessageDispatcher(dispatcher: Dispatcher): Unit = {}
-
- @Pointcut("onCreateExecutorService() && onCflowMessageDispatcher(dispatcher)")
- def onDispatcherStartup(dispatcher: Dispatcher): Unit = {}
-
- @After("onDispatcherStartup(dispatcher)")
- def afterDispatcherStartup(dispatcher: MessageDispatcher): Unit = {
-
- val dispatcherWithMetrics = dispatcher.asInstanceOf[DispatcherMetricCollectionInfo]
- val metricsExtension = Kamon(Metrics)(dispatcherWithMetrics.actorSystem)
- val metricIdentity = DispatcherMetrics(dispatcher.id)
-
- dispatcherWithMetrics.metricIdentity = metricIdentity
- dispatcherWithMetrics.dispatcherMetricsRecorder = metricsExtension.register(metricIdentity, DispatcherMetrics.Factory)
-
- if (dispatcherWithMetrics.dispatcherMetricsRecorder.isDefined) {
- dispatcherWithMetrics.dispatcherCollectorCancellable = metricsExtension.scheduleGaugeRecorder {
- dispatcherWithMetrics.dispatcherMetricsRecorder.map {
- dm ⇒
- val DispatcherMetricsMeasurement(maximumPoolSize, runningThreadCount, queueTaskCount, poolSize) =
- DispatcherMetricsCollector.collect(dispatcher)
-
- dm.maximumPoolSize.record(maximumPoolSize)
- dm.runningThreadCount.record(runningThreadCount)
- dm.queueTaskCount.record(queueTaskCount)
- dm.poolSize.record(poolSize)
- }
- }
- }
- }
-
- @Pointcut("execution(* akka.dispatch.MessageDispatcher.shutdown(..)) && this(dispatcher)")
- def onDispatcherShutdown(dispatcher: MessageDispatcher): Unit = {}
-
- @After("onDispatcherShutdown(dispatcher)")
- def afterDispatcherShutdown(dispatcher: MessageDispatcher): Unit = {
- val dispatcherWithMetrics = dispatcher.asInstanceOf[DispatcherMetricCollectionInfo]
-
- dispatcherWithMetrics.dispatcherMetricsRecorder.map {
- dispatcher ⇒
- dispatcherWithMetrics.dispatcherCollectorCancellable.cancel()
- Kamon(Metrics)(dispatcherWithMetrics.actorSystem).unregister(dispatcherWithMetrics.metricIdentity)
- }
- }
-}
-
-@Aspect
-class DispatcherMetricCollectionInfoIntoDispatcherMixin {
-
- @DeclareMixin("akka.dispatch.MessageDispatcher")
- def mixinDispatcherMetricsToMessageDispatcher: DispatcherMetricCollectionInfo = new DispatcherMetricCollectionInfo {}
-
- @DeclareMixin("akka.dispatch.Dispatchers")
- def mixinDispatchersToDispatchersWithActorSystem: DispatchersWithActorSystem = new DispatchersWithActorSystem {}
-}
-
-trait DispatcherMetricCollectionInfo {
- var metricIdentity: DispatcherMetrics = _
- var dispatcherMetricsRecorder: Option[DispatcherMetricRecorder] = _
- var dispatcherCollectorCancellable: Cancellable = _
- var actorSystem: ActorSystemImpl = _
-}
-
-trait DispatchersWithActorSystem {
- var actorSystem: ActorSystemImpl = _
-}
-
-object DispatcherMetricsCollector {
-
- case class DispatcherMetricsMeasurement(maximumPoolSize: Long, runningThreadCount: Long, queueTaskCount: Long, poolSize: Long)
-
- private def collectForkJoinMetrics(pool: ForkJoinPool): DispatcherMetricsMeasurement = {
- DispatcherMetricsMeasurement(pool.getParallelism, pool.getActiveThreadCount,
- (pool.getQueuedTaskCount + pool.getQueuedSubmissionCount), pool.getPoolSize)
- }
-
- private def collectExecutorMetrics(pool: ThreadPoolExecutor): DispatcherMetricsMeasurement = {
- DispatcherMetricsMeasurement(pool.getMaximumPoolSize, pool.getActiveCount, pool.getQueue.size(), pool.getPoolSize)
- }
-
- private val executorServiceMethod: Method = {
- // executorService is protected
- val method = classOf[Dispatcher].getDeclaredMethod("executorService")
- method.setAccessible(true)
- method
- }
-
- def collect(dispatcher: MessageDispatcher): DispatcherMetricsMeasurement = {
- dispatcher match {
- case x: Dispatcher ⇒ {
- val executor = executorServiceMethod.invoke(x) match {
- case delegate: ExecutorServiceDelegate ⇒ delegate.executor
- case other ⇒ other
- }
-
- executor match {
- case fjp: ForkJoinPool ⇒ collectForkJoinMetrics(fjp)
- case tpe: ThreadPoolExecutor ⇒ collectExecutorMetrics(tpe)
- case anything ⇒ DispatcherMetricsMeasurement(0L, 0L, 0L, 0L)
- }
- }
- case _ ⇒ new DispatcherMetricsMeasurement(0L, 0L, 0L, 0L)
- }
- }
-}
diff --git a/kamon-core/src/main/scala/kamon/metric/ActorMetrics.scala b/kamon-core/src/main/scala/kamon/metric/ActorMetrics.scala
deleted file mode 100644
index d2cb4e38..00000000
--- a/kamon-core/src/main/scala/kamon/metric/ActorMetrics.scala
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metric
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric.instrument.{ MinMaxCounter, Counter, Histogram }
-
-case class ActorMetrics(name: String) extends MetricGroupIdentity {
- val category = ActorMetrics
-}
-
-object ActorMetrics extends MetricGroupCategory {
- val name = "actor"
-
- case object ProcessingTime extends MetricIdentity { val name = "processing-time" }
- case object MailboxSize extends MetricIdentity { val name = "mailbox-size" }
- case object TimeInMailbox extends MetricIdentity { val name = "time-in-mailbox" }
- case object Errors extends MetricIdentity { val name = "errors" }
-
- case class ActorMetricsRecorder(processingTime: Histogram, timeInMailbox: Histogram, mailboxSize: MinMaxCounter,
- errors: Counter) extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): ActorMetricSnapshot =
- ActorMetricSnapshot(
- processingTime.collect(context),
- timeInMailbox.collect(context),
- mailboxSize.collect(context),
- errors.collect(context))
-
- def cleanup: Unit = {
- processingTime.cleanup
- mailboxSize.cleanup
- timeInMailbox.cleanup
- errors.cleanup
- }
- }
-
- case class ActorMetricSnapshot(processingTime: Histogram.Snapshot, timeInMailbox: Histogram.Snapshot,
- mailboxSize: Histogram.Snapshot, errors: Counter.Snapshot) extends MetricGroupSnapshot {
-
- type GroupSnapshotType = ActorMetricSnapshot
-
- def merge(that: ActorMetricSnapshot, context: CollectionContext): ActorMetricSnapshot =
- ActorMetricSnapshot(
- processingTime.merge(that.processingTime, context),
- timeInMailbox.merge(that.timeInMailbox, context),
- mailboxSize.merge(that.mailboxSize, context),
- errors.merge(that.errors, context))
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- (ProcessingTime -> processingTime),
- (MailboxSize -> mailboxSize),
- (TimeInMailbox -> timeInMailbox),
- (Errors -> errors))
- }
-
- val Factory = ActorMetricGroupFactory
-}
-
-case object ActorMetricGroupFactory extends MetricGroupFactory {
- import ActorMetrics._
-
- type GroupRecorder = ActorMetricsRecorder
-
- def create(config: Config, system: ActorSystem): ActorMetricsRecorder = {
- val settings = config.getConfig("precision.actor")
-
- val processingTimeConfig = settings.getConfig("processing-time")
- val timeInMailboxConfig = settings.getConfig("time-in-mailbox")
- val mailboxSizeConfig = settings.getConfig("mailbox-size")
-
- new ActorMetricsRecorder(
- Histogram.fromConfig(processingTimeConfig),
- Histogram.fromConfig(timeInMailboxConfig),
- MinMaxCounter.fromConfig(mailboxSizeConfig, system),
- Counter())
- }
-}
diff --git a/kamon-core/src/main/scala/kamon/metric/DispatcherMetrics.scala b/kamon-core/src/main/scala/kamon/metric/DispatcherMetrics.scala
deleted file mode 100644
index 126f6333..00000000
--- a/kamon-core/src/main/scala/kamon/metric/DispatcherMetrics.scala
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metric
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric.instrument.{ Histogram, HdrHistogram }
-
-case class DispatcherMetrics(name: String) extends MetricGroupIdentity {
- val category = DispatcherMetrics
-}
-
-object DispatcherMetrics extends MetricGroupCategory {
- val name = "dispatcher"
-
- case object MaximumPoolSize extends MetricIdentity { val name = "maximum-pool-size" }
- case object RunningThreadCount extends MetricIdentity { val name = "running-thread-count" }
- case object QueueTaskCount extends MetricIdentity { val name = "queued-task-count" }
- case object PoolSize extends MetricIdentity { val name = "pool-size" }
-
- case class DispatcherMetricRecorder(maximumPoolSize: Histogram, runningThreadCount: Histogram,
- queueTaskCount: Histogram, poolSize: Histogram)
- extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): MetricGroupSnapshot =
- DispatcherMetricSnapshot(
- maximumPoolSize.collect(context),
- runningThreadCount.collect(context),
- queueTaskCount.collect(context),
- poolSize.collect(context))
-
- def cleanup: Unit = {}
-
- }
-
- case class DispatcherMetricSnapshot(maximumPoolSize: Histogram.Snapshot, runningThreadCount: Histogram.Snapshot,
- queueTaskCount: Histogram.Snapshot, poolSize: Histogram.Snapshot) extends MetricGroupSnapshot {
-
- type GroupSnapshotType = DispatcherMetricSnapshot
-
- def merge(that: DispatcherMetricSnapshot, context: CollectionContext): DispatcherMetricSnapshot =
- DispatcherMetricSnapshot(
- maximumPoolSize.merge(that.maximumPoolSize, context),
- runningThreadCount.merge(that.runningThreadCount, context),
- queueTaskCount.merge(that.queueTaskCount, context),
- poolSize.merge(that.poolSize, context))
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- (MaximumPoolSize -> maximumPoolSize),
- (RunningThreadCount -> runningThreadCount),
- (QueueTaskCount -> queueTaskCount),
- (PoolSize -> poolSize))
- }
-
- val Factory = DispatcherMetricGroupFactory
-}
-
-case object DispatcherMetricGroupFactory extends MetricGroupFactory {
-
- import DispatcherMetrics._
-
- type GroupRecorder = DispatcherMetricRecorder
-
- def create(config: Config, system: ActorSystem): DispatcherMetricRecorder = {
- val settings = config.getConfig("precision.dispatcher")
-
- val maximumPoolSizeConfig = settings.getConfig("maximum-pool-size")
- val runningThreadCountConfig = settings.getConfig("running-thread-count")
- val queueTaskCountConfig = settings.getConfig("queued-task-count")
- val poolSizeConfig = settings.getConfig("pool-size")
-
- new DispatcherMetricRecorder(
- Histogram.fromConfig(maximumPoolSizeConfig),
- Histogram.fromConfig(runningThreadCountConfig),
- Histogram.fromConfig(queueTaskCountConfig),
- Histogram.fromConfig(poolSizeConfig))
- }
-
-}
diff --git a/kamon-core/src/main/scala/kamon/metric/Entity.scala b/kamon-core/src/main/scala/kamon/metric/Entity.scala
new file mode 100644
index 00000000..8d328f83
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/Entity.scala
@@ -0,0 +1,58 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+/**
+ * Identifies a `thing` that is being monitored by Kamon. A [[kamon.metric.Entity]] is used to identify tracked
+ * `things` on both the metrics recording and reporting sides. Only the name and category fields are used when
+ * determining equality between two entities.
+ *
+ * // TODO: Find a better word for `thing`.
+ */
+class Entity(val name: String, val category: String, val metadata: Map[String, String]) {
+
+ override def equals(o: Any): Boolean = {
+ if (this eq o.asInstanceOf[AnyRef])
+ true
+ else if ((o.asInstanceOf[AnyRef] eq null) || !o.isInstanceOf[Entity])
+ false
+ else {
+ val thatAsEntity = o.asInstanceOf[Entity]
+ category == thatAsEntity.category && name == thatAsEntity.name
+ }
+ }
+
+ override def hashCode: Int = {
+ var result: Int = name.hashCode
+ result = 31 * result + category.hashCode
+ return result
+ }
+}
+
+object Entity {
+ def apply(name: String, category: String): Entity =
+ apply(name, category, Map.empty)
+
+ def apply(name: String, category: String, metadata: Map[String, String]): Entity =
+ new Entity(name, category, metadata)
+
+ def create(name: String, category: String): Entity =
+ apply(name, category, Map.empty)
+
+ def create(name: String, category: String, metadata: Map[String, String]): Entity =
+ new Entity(name, category, metadata)
+} \ No newline at end of file
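Because equals and hashCode use only name and category, entities that differ solely in metadata collapse to the same identity:

  val a = Entity("user/simple-service-actor", "actor", Map("version" -> "1"))
  val b = Entity("user/simple-service-actor", "actor")
  a == b                    // true: metadata is ignored by Entity.equals
  a.hashCode == b.hashCode  // true: hashCode mixes only name and category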
diff --git a/kamon-core/src/main/scala/kamon/metric/EntityRecorder.scala b/kamon-core/src/main/scala/kamon/metric/EntityRecorder.scala
new file mode 100644
index 00000000..6e0a4248
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/EntityRecorder.scala
@@ -0,0 +1,173 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+import kamon.metric.instrument.Gauge.CurrentValueCollector
+import kamon.metric.instrument.Histogram.DynamicRange
+import kamon.metric.instrument._
+
+import scala.collection.concurrent.TrieMap
+import scala.concurrent.duration.FiniteDuration
+
+trait EntityRecorder {
+ def collect(collectionContext: CollectionContext): EntitySnapshot
+ def cleanup: Unit
+}
+
+trait EntityRecorderFactory[T <: EntityRecorder] {
+ def category: String
+ def createRecorder(instrumentFactory: InstrumentFactory): T
+}
+
+abstract class GenericEntityRecorder(instrumentFactory: InstrumentFactory) extends EntityRecorder {
+ import kamon.util.TriemapAtomicGetOrElseUpdate.Syntax
+
+ private val _instruments = TrieMap.empty[MetricKey, Instrument]
+ private def register[T <: Instrument](key: MetricKey, instrument: ⇒ T): T =
+ _instruments.atomicGetOrElseUpdate(key, instrument, _.cleanup).asInstanceOf[T]
+
+ protected def histogram(name: String): Histogram =
+ register(HistogramKey(name), instrumentFactory.createHistogram(name))
+
+ protected def histogram(name: String, dynamicRange: DynamicRange): Histogram =
+ register(HistogramKey(name), instrumentFactory.createHistogram(name, Some(dynamicRange)))
+
+ protected def histogram(name: String, unitOfMeasurement: UnitOfMeasurement): Histogram =
+ register(HistogramKey(name, unitOfMeasurement), instrumentFactory.createHistogram(name))
+
+ protected def histogram(name: String, dynamicRange: DynamicRange, unitOfMeasurement: UnitOfMeasurement): Histogram =
+ register(HistogramKey(name, unitOfMeasurement), instrumentFactory.createHistogram(name, Some(dynamicRange)))
+
+ protected def histogram(key: HistogramKey): Histogram =
+ register(key, instrumentFactory.createHistogram(key.name))
+
+ protected def histogram(key: HistogramKey, dynamicRange: DynamicRange): Histogram =
+ register(key, instrumentFactory.createHistogram(key.name, Some(dynamicRange)))
+
+ protected def removeHistogram(name: String): Unit =
+ _instruments.remove(HistogramKey(name))
+
+ protected def removeHistogram(key: HistogramKey): Unit =
+ _instruments.remove(key)
+
+ protected def minMaxCounter(name: String): MinMaxCounter =
+ register(MinMaxCounterKey(name), instrumentFactory.createMinMaxCounter(name))
+
+ protected def minMaxCounter(name: String, dynamicRange: DynamicRange): MinMaxCounter =
+ register(MinMaxCounterKey(name), instrumentFactory.createMinMaxCounter(name, Some(dynamicRange)))
+
+ protected def minMaxCounter(name: String, refreshInterval: FiniteDuration): MinMaxCounter =
+ register(MinMaxCounterKey(name), instrumentFactory.createMinMaxCounter(name, refreshInterval = Some(refreshInterval)))
+
+ protected def minMaxCounter(name: String, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter =
+ register(MinMaxCounterKey(name, unitOfMeasurement), instrumentFactory.createMinMaxCounter(name))
+
+ protected def minMaxCounter(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration): MinMaxCounter =
+ register(MinMaxCounterKey(name), instrumentFactory.createMinMaxCounter(name, Some(dynamicRange), Some(refreshInterval)))
+
+ protected def minMaxCounter(name: String, dynamicRange: DynamicRange, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter =
+ register(MinMaxCounterKey(name, unitOfMeasurement), instrumentFactory.createMinMaxCounter(name, Some(dynamicRange)))
+
+ protected def minMaxCounter(name: String, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter =
+ register(MinMaxCounterKey(name, unitOfMeasurement), instrumentFactory.createMinMaxCounter(name, refreshInterval = Some(refreshInterval)))
+
+ protected def minMaxCounter(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter =
+ register(MinMaxCounterKey(name), instrumentFactory.createMinMaxCounter(name, Some(dynamicRange), Some(refreshInterval)))
+
+ protected def minMaxCounter(key: MinMaxCounterKey): MinMaxCounter =
+ register(key, instrumentFactory.createMinMaxCounter(key.name))
+
+ protected def minMaxCounter(key: MinMaxCounterKey, dynamicRange: DynamicRange): MinMaxCounter =
+ register(key, instrumentFactory.createMinMaxCounter(key.name, Some(dynamicRange)))
+
+ protected def minMaxCounter(key: MinMaxCounterKey, refreshInterval: FiniteDuration): MinMaxCounter =
+ register(key, instrumentFactory.createMinMaxCounter(key.name, refreshInterval = Some(refreshInterval)))
+
+ protected def minMaxCounter(key: MinMaxCounterKey, dynamicRange: DynamicRange, refreshInterval: FiniteDuration): MinMaxCounter =
+ register(key, instrumentFactory.createMinMaxCounter(key.name, Some(dynamicRange), Some(refreshInterval)))
+
+ protected def removeMinMaxCounter(name: String): Unit =
+ _instruments.remove(MinMaxCounterKey(name))
+
+ protected def removeMinMaxCounter(key: MinMaxCounterKey): Unit =
+ _instruments.remove(key)
+
+ protected def gauge(name: String, valueCollector: CurrentValueCollector): Gauge =
+ register(GaugeKey(name), instrumentFactory.createGauge(name, valueCollector = valueCollector))
+
+ protected def gauge(name: String, dynamicRange: DynamicRange, valueCollector: CurrentValueCollector): Gauge =
+ register(GaugeKey(name), instrumentFactory.createGauge(name, Some(dynamicRange), valueCollector = valueCollector))
+
+ protected def gauge(name: String, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge =
+ register(GaugeKey(name), instrumentFactory.createGauge(name, refreshInterval = Some(refreshInterval), valueCollector = valueCollector))
+
+ protected def gauge(name: String, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge =
+ register(GaugeKey(name, unitOfMeasurement), instrumentFactory.createGauge(name, valueCollector = valueCollector))
+
+ protected def gauge(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge =
+ register(GaugeKey(name), instrumentFactory.createGauge(name, Some(dynamicRange), Some(refreshInterval), valueCollector = valueCollector))
+
+ protected def gauge(name: String, dynamicRange: DynamicRange, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge =
+ register(GaugeKey(name, unitOfMeasurement), instrumentFactory.createGauge(name, Some(dynamicRange), valueCollector = valueCollector))
+
+ protected def gauge(name: String, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge =
+ register(GaugeKey(name), instrumentFactory.createGauge(name, refreshInterval = Some(refreshInterval), valueCollector = valueCollector))
+
+ protected def gauge(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge =
+ register(GaugeKey(name, unitOfMeasurement), instrumentFactory.createGauge(name, Some(dynamicRange), Some(refreshInterval), valueCollector))
+
+ protected def gauge(key: GaugeKey, valueCollector: CurrentValueCollector): Gauge =
+ register(key, instrumentFactory.createGauge(key.name, valueCollector = valueCollector))
+
+ protected def gauge(key: GaugeKey, dynamicRange: DynamicRange, valueCollector: CurrentValueCollector): Gauge =
+ register(key, instrumentFactory.createGauge(key.name, Some(dynamicRange), valueCollector = valueCollector))
+
+ protected def gauge(key: GaugeKey, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge =
+ register(key, instrumentFactory.createGauge(key.name, refreshInterval = Some(refreshInterval), valueCollector = valueCollector))
+
+ protected def gauge(key: GaugeKey, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge =
+ register(key, instrumentFactory.createGauge(key.name, Some(dynamicRange), Some(refreshInterval), valueCollector = valueCollector))
+
+ protected def removeGauge(name: String): Unit =
+ _instruments.remove(GaugeKey(name))
+
+ protected def removeGauge(key: GaugeKey): Unit =
+ _instruments.remove(key)
+
+ protected def counter(name: String): Counter =
+ register(CounterKey(name), instrumentFactory.createCounter())
+
+ protected def counter(key: CounterKey): Counter =
+ register(key, instrumentFactory.createCounter())
+
+ protected def removeCounter(name: String): Unit =
+ _instruments.remove(CounterKey(name))
+
+ protected def removeCounter(key: CounterKey): Unit =
+ _instruments.remove(key)
+
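+ /**
+ * Collects a snapshot of every instrument currently registered on this recorder.
+ */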
+ def collect(collectionContext: CollectionContext): EntitySnapshot = {
+ val snapshots = Map.newBuilder[MetricKey, InstrumentSnapshot]
+ _instruments.foreach {
+ case (key, instrument) ⇒ snapshots += key -> instrument.collect(collectionContext)
+ }
+
+ new DefaultEntitySnapshot(snapshots.result())
+ }
+
+ def cleanup: Unit = _instruments.values.foreach(_.cleanup)
+} \ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/metric/EntitySnapshot.scala b/kamon-core/src/main/scala/kamon/metric/EntitySnapshot.scala
new file mode 100644
index 00000000..7ebb79e2
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/EntitySnapshot.scala
@@ -0,0 +1,63 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+import kamon.metric.instrument.{ Counter, Histogram, CollectionContext, InstrumentSnapshot }
+import kamon.util.MapMerge
+import scala.reflect.ClassTag
+
+trait EntitySnapshot {
+ def metrics: Map[MetricKey, InstrumentSnapshot]
+ def merge(that: EntitySnapshot, collectionContext: CollectionContext): EntitySnapshot
+
+ def histogram(name: String): Option[Histogram.Snapshot] =
+ find[HistogramKey, Histogram.Snapshot](name)
+
+ def minMaxCounter(name: String): Option[Histogram.Snapshot] =
+ find[MinMaxCounterKey, Histogram.Snapshot](name)
+
+ def gauge(name: String): Option[Histogram.Snapshot] =
+ find[GaugeKey, Histogram.Snapshot](name)
+
+ def counter(name: String): Option[Counter.Snapshot] =
+ find[CounterKey, Counter.Snapshot](name)
+
+ def histograms: Map[HistogramKey, Histogram.Snapshot] =
+ filterByType[HistogramKey, Histogram.Snapshot]
+
+ def minMaxCounters: Map[MinMaxCounterKey, Histogram.Snapshot] =
+ filterByType[MinMaxCounterKey, Histogram.Snapshot]
+
+ def gauges: Map[GaugeKey, Histogram.Snapshot] =
+ filterByType[GaugeKey, Histogram.Snapshot]
+
+ def counters: Map[CounterKey, Counter.Snapshot] =
+ filterByType[CounterKey, Counter.Snapshot]
+
+ private def filterByType[K <: MetricKey, V <: InstrumentSnapshot](implicit keyCT: ClassTag[K]): Map[K, V] =
+ metrics.collect { case (k, v) if keyCT.runtimeClass.isInstance(k) ⇒ (k.asInstanceOf[K], v.asInstanceOf[V]) }
+
+ private def find[K <: MetricKey, V <: InstrumentSnapshot](name: String)(implicit keyCT: ClassTag[K]) =
+ metrics.find { case (k, v) ⇒ keyCT.runtimeClass.isInstance(k) && k.name == name } map (_._2.asInstanceOf[V])
+}
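+// The name-based lookups above select by instrument type and name only, ignoring unit of
+// measurement and metadata; e.g. snapshot.histogram("elapsed-time") returns the snapshot
+// stored under any HistogramKey named "elapsed-time".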
+
+class DefaultEntitySnapshot(val metrics: Map[MetricKey, InstrumentSnapshot]) extends EntitySnapshot {
+ import MapMerge.Syntax
+
+ override def merge(that: EntitySnapshot, collectionContext: CollectionContext): EntitySnapshot =
+ new DefaultEntitySnapshot(metrics.merge(that.metrics, (l, r) ⇒ l.merge(r, collectionContext)))
+} \ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/metric/MetricKey.scala b/kamon-core/src/main/scala/kamon/metric/MetricKey.scala
new file mode 100644
index 00000000..a5d30c81
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/MetricKey.scala
@@ -0,0 +1,169 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+import kamon.metric.instrument.{ InstrumentTypes, InstrumentType, UnitOfMeasurement }
+
+/**
+ * MetricKeys are used to identify a given metric in entity recorders and snapshots. MetricKeys can be used to encode
+ * additional metadata for a metric being recorded, as well as the unit of measurement of the data being recorded.
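+ *
+ * For example (illustrative values), a key for a histogram tracked in nanoseconds:
+ *
+ * {{{
+ * val key = HistogramKey("elapsed-time", Time.Nanoseconds, Map("algorithm" -> "quicksort"))
+ * }}}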
+ */
+sealed trait MetricKey {
+ def name: String
+ def unitOfMeasurement: UnitOfMeasurement
+ def instrumentType: InstrumentType
+ def metadata: Map[String, String]
+}
+
+// Wish there were a shorter way to describe the operations below, but apparently there is no way to generalize all
+// the apply/create variants so that they produce the desired return types when used from Java.
+
+/**
+ * MetricKey for all Histogram-based metrics.
+ */
+case class HistogramKey(name: String, unitOfMeasurement: UnitOfMeasurement, metadata: Map[String, String]) extends MetricKey {
+ val instrumentType = InstrumentTypes.Histogram
+}
+
+object HistogramKey {
+ def apply(name: String): HistogramKey =
+ apply(name, UnitOfMeasurement.Unknown)
+
+ def apply(name: String, unitOfMeasurement: UnitOfMeasurement): HistogramKey =
+ apply(name, unitOfMeasurement, Map.empty)
+
+ def apply(name: String, metadata: Map[String, String]): HistogramKey =
+ apply(name, UnitOfMeasurement.Unknown, metadata)
+
+ /**
+ * Java friendly versions:
+ */
+
+ def create(name: String): HistogramKey =
+ apply(name, UnitOfMeasurement.Unknown)
+
+ def create(name: String, unitOfMeasurement: UnitOfMeasurement): HistogramKey =
+ apply(name, unitOfMeasurement)
+
+ def create(name: String, metadata: Map[String, String]): HistogramKey =
+ apply(name, metadata)
+
+ def create(name: String, unitOfMeasurement: UnitOfMeasurement, metadata: Map[String, String]): HistogramKey =
+ apply(name, unitOfMeasurement, metadata)
+}
+
+/**
+ * MetricKey for all MinMaxCounter-based metrics.
+ */
+case class MinMaxCounterKey(name: String, unitOfMeasurement: UnitOfMeasurement, metadata: Map[String, String]) extends MetricKey {
+ val instrumentType = InstrumentTypes.MinMaxCounter
+}
+
+object MinMaxCounterKey {
+ def apply(name: String): MinMaxCounterKey =
+ apply(name, UnitOfMeasurement.Unknown)
+
+ def apply(name: String, unitOfMeasurement: UnitOfMeasurement): MinMaxCounterKey =
+ apply(name, unitOfMeasurement, Map.empty)
+
+ def apply(name: String, metadata: Map[String, String]): MinMaxCounterKey =
+ apply(name, UnitOfMeasurement.Unknown, metadata)
+
+ /**
+ * Java friendly versions:
+ */
+
+ def create(name: String): MinMaxCounterKey =
+ apply(name, UnitOfMeasurement.Unknown)
+
+ def create(name: String, unitOfMeasurement: UnitOfMeasurement): MinMaxCounterKey =
+ apply(name, unitOfMeasurement)
+
+ def create(name: String, metadata: Map[String, String]): MinMaxCounterKey =
+ apply(name, metadata)
+
+ def create(name: String, unitOfMeasurement: UnitOfMeasurement, metadata: Map[String, String]): MinMaxCounterKey =
+ apply(name, unitOfMeasurement, metadata)
+}
+
+/**
+ * MetricKey for all Gauge-based metrics.
+ */
+case class GaugeKey(name: String, unitOfMeasurement: UnitOfMeasurement, metadata: Map[String, String]) extends MetricKey {
+ val instrumentType = InstrumentTypes.Gauge
+}
+
+object GaugeKey {
+ def apply(name: String): GaugeKey =
+ apply(name, UnitOfMeasurement.Unknown)
+
+ def apply(name: String, unitOfMeasurement: UnitOfMeasurement): GaugeKey =
+ apply(name, unitOfMeasurement, Map.empty)
+
+ def apply(name: String, metadata: Map[String, String]): GaugeKey =
+ apply(name, UnitOfMeasurement.Unknown, metadata)
+
+ /**
+ * Java friendly versions:
+ */
+
+ def create(name: String): GaugeKey =
+ apply(name, UnitOfMeasurement.Unknown)
+
+ def create(name: String, unitOfMeasurement: UnitOfMeasurement): GaugeKey =
+ apply(name, unitOfMeasurement)
+
+ def create(name: String, metadata: Map[String, String]): GaugeKey =
+ apply(name, metadata)
+
+ def create(name: String, unitOfMeasurement: UnitOfMeasurement, metadata: Map[String, String]): GaugeKey =
+ apply(name, unitOfMeasurement, metadata)
+}
+
+/**
+ * MetricKey for all Counter-based metrics.
+ */
+case class CounterKey(name: String, unitOfMeasurement: UnitOfMeasurement, metadata: Map[String, String]) extends MetricKey {
+ val instrumentType = InstrumentTypes.Counter
+}
+
+object CounterKey {
+ def apply(name: String): CounterKey =
+ apply(name, UnitOfMeasurement.Unknown)
+
+ def apply(name: String, unitOfMeasurement: UnitOfMeasurement): CounterKey =
+ apply(name, unitOfMeasurement, Map.empty)
+
+ def apply(name: String, metadata: Map[String, String]): CounterKey =
+ apply(name, UnitOfMeasurement.Unknown, metadata)
+
+ /**
+ * Java friendly versions:
+ */
+
+ def create(name: String): CounterKey =
+ apply(name, UnitOfMeasurement.Unknown)
+
+ def create(name: String, unitOfMeasurement: UnitOfMeasurement): CounterKey =
+ apply(name, unitOfMeasurement)
+
+ def create(name: String, metadata: Map[String, String]): CounterKey =
+ apply(name, metadata)
+
+ def create(name: String, unitOfMeasurement: UnitOfMeasurement, metadata: Map[String, String]): CounterKey =
+ apply(name, unitOfMeasurement, metadata)
+} \ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/metric/MetricsExtension.scala b/kamon-core/src/main/scala/kamon/metric/MetricsExtension.scala
index f491cc57..87911352 100644
--- a/kamon-core/src/main/scala/kamon/metric/MetricsExtension.scala
+++ b/kamon-core/src/main/scala/kamon/metric/MetricsExtension.scala
@@ -16,129 +16,126 @@
package kamon.metric
-import akka.event.Logging.Error
-import akka.event.EventStream
+import com.typesafe.config.Config
+import kamon.metric.SubscriptionsDispatcher.{ Unsubscribe, Subscribe }
+import kamon.metric.instrument.{ DefaultRefreshScheduler, InstrumentFactory, CollectionContext }
import scala.collection.concurrent.TrieMap
import akka.actor._
-import com.typesafe.config.Config
-import kamon.util.GlobPathFilter
-import kamon.Kamon
-import akka.actor
-import kamon.metric.Metrics.MetricGroupFilter
-import kamon.metric.Subscriptions.{ Unsubscribe, Subscribe }
-import java.util.concurrent.TimeUnit
-
-class MetricsExtension(system: ExtendedActorSystem) extends Kamon.Extension {
- import Metrics.AtomicGetOrElseUpdateForTriemap
-
- val metricsExtConfig = system.settings.config.getConfig("kamon.metrics")
- printInitializationMessage(system.eventStream, metricsExtConfig.getBoolean("disable-aspectj-weaver-missing-error"))
-
- /** Configured Dispatchers */
- val metricSubscriptionsDispatcher = system.dispatchers.lookup(metricsExtConfig.getString("dispatchers.metric-subscriptions"))
- val gaugeRecordingsDispatcher = system.dispatchers.lookup(metricsExtConfig.getString("dispatchers.gauge-recordings"))
-
- /** Configuration Settings */
- val gaugeRecordingInterval: Long = metricsExtConfig.getMilliseconds("gauge-recording-interval")
-
- val storage = TrieMap[MetricGroupIdentity, MetricGroupRecorder]()
- val filters = loadFilters(metricsExtConfig)
- lazy val subscriptions = system.actorOf(Props[Subscriptions], "kamon-metrics-subscriptions")
-
- def register(identity: MetricGroupIdentity, factory: MetricGroupFactory): Option[factory.GroupRecorder] = {
- if (shouldTrack(identity))
- Some(storage.atomicGetOrElseUpdate(identity, factory.create(metricsExtConfig, system)).asInstanceOf[factory.GroupRecorder])
- else
- None
- }
+import kamon.util.{ LazyActorRef, TriemapAtomicGetOrElseUpdate }
- def unregister(identity: MetricGroupIdentity): Unit = {
- storage.remove(identity).map(_.cleanup)
- }
+case class EntityRegistration[T <: EntityRecorder](entity: Entity, recorder: T)
- def subscribe[C <: MetricGroupCategory](category: C, selection: String, subscriber: ActorRef, permanently: Boolean = false): Unit =
- subscriptions.tell(Subscribe(category, selection, subscriber, permanently), subscriber)
+trait MetricsExtension {
+ def settings: MetricsExtensionSettings
+ def shouldTrack(entity: Entity): Boolean
+ def shouldTrack(entityName: String, category: String): Boolean =
+ shouldTrack(Entity(entityName, category))
- def unsubscribe(subscriber: ActorRef): Unit =
- subscriptions.tell(Unsubscribe(subscriber), subscriber)
+ def register[T <: EntityRecorder](recorderFactory: EntityRecorderFactory[T], entityName: String): Option[EntityRegistration[T]]
+ def register[T <: EntityRecorder](entity: Entity, recorder: T): EntityRegistration[T]
+ def unregister(entity: Entity): Unit
- def scheduleGaugeRecorder(body: ⇒ Unit): Cancellable = {
- import scala.concurrent.duration._
+ def find(entity: Entity): Option[EntityRecorder]
+ def find(name: String, category: String): Option[EntityRecorder]
- system.scheduler.schedule(gaugeRecordingInterval milliseconds, gaugeRecordingInterval milliseconds) {
- body
- }(gaugeRecordingsDispatcher)
- }
+ def subscribe(filter: SubscriptionFilter, subscriber: ActorRef): Unit =
+ subscribe(filter, subscriber, permanently = false)
- private def shouldTrack(identity: MetricGroupIdentity): Boolean = {
- filters.get(identity.category.name).map(filter ⇒ filter.accept(identity.name)).getOrElse(true)
- }
+ def subscribe(category: String, selection: String, subscriber: ActorRef, permanently: Boolean): Unit =
+ subscribe(SubscriptionFilter(category, selection), subscriber, permanently)
+
+ def subscribe(category: String, selection: String, subscriber: ActorRef): Unit =
+ subscribe(SubscriptionFilter(category, selection), subscriber, permanently = false)
- def loadFilters(config: Config): Map[String, MetricGroupFilter] = {
- import scala.collection.JavaConverters._
+ def subscribe(filter: SubscriptionFilter, subscriber: ActorRef, permanently: Boolean): Unit
- val filters = config.getObjectList("filters").asScala
+ def unsubscribe(subscriber: ActorRef): Unit
+ def buildDefaultCollectionContext: CollectionContext
+ def instrumentFactory(category: String): InstrumentFactory
+}
+
+private[kamon] class MetricsExtensionImpl(config: Config) extends MetricsExtension {
+ private val _trackedEntities = TrieMap.empty[Entity, EntityRecorder]
+ private val _subscriptions = new LazyActorRef
+
+ val settings = MetricsExtensionSettings(config)
+
+ def shouldTrack(entity: Entity): Boolean =
+ settings.entityFilters.get(entity.category).map {
+ filter ⇒ filter.accept(entity.name)
- val allFilters =
- for (
- filter ← filters;
- entry ← filter.entrySet().asScala
- ) yield {
- val key = entry.getKey
- val keyBasedConfig = entry.getValue.atKey(key)
+ } getOrElse (settings.trackUnmatchedEntities)
- val includes = keyBasedConfig.getStringList(s"$key.includes").asScala.map(inc ⇒ new GlobPathFilter(inc)).toList
- val excludes = keyBasedConfig.getStringList(s"$key.excludes").asScala.map(exc ⇒ new GlobPathFilter(exc)).toList
+ def register[T <: EntityRecorder](recorderFactory: EntityRecorderFactory[T], entityName: String): Option[EntityRegistration[T]] = {
+ import TriemapAtomicGetOrElseUpdate.Syntax
+ val entity = Entity(entityName, recorderFactory.category)
- (key, MetricGroupFilter(includes, excludes))
- }
+ if (shouldTrack(entity)) {
+ val instrumentFactory = settings.instrumentFactories.get(recorderFactory.category).getOrElse(settings.defaultInstrumentFactory)
+ val recorder = _trackedEntities.atomicGetOrElseUpdate(entity, recorderFactory.createRecorder(instrumentFactory), _.cleanup).asInstanceOf[T]
- allFilters.toMap
+ Some(EntityRegistration(entity, recorder))
+ } else None
}
- def buildDefaultCollectionContext: CollectionContext =
- CollectionContext(metricsExtConfig.getInt("default-collection-context-buffer-size"))
-
- def printInitializationMessage(eventStream: EventStream, disableWeaverMissingError: Boolean): Unit = {
- if (!disableWeaverMissingError) {
- val weaverMissingMessage =
- """
- |
- | ___ _ ___ _ _ ___ ___ _ _
- | / _ \ | | |_ | | | | | | \/ |(_) (_)
- |/ /_\ \ ___ _ __ ___ ___ | |_ | | | | | | ___ __ _ __ __ ___ _ __ | . . | _ ___ ___ _ _ __ __ _
- || _ |/ __|| '_ \ / _ \ / __|| __| | | | |/\| | / _ \ / _` |\ \ / // _ \| '__| | |\/| || |/ __|/ __|| || '_ \ / _` |
- || | | |\__ \| |_) || __/| (__ | |_ /\__/ / \ /\ /| __/| (_| | \ V /| __/| | | | | || |\__ \\__ \| || | | || (_| |
- |\_| |_/|___/| .__/ \___| \___| \__|\____/ \/ \/ \___| \__,_| \_/ \___||_| \_| |_/|_||___/|___/|_||_| |_| \__, |
- | | | __/ |
- | |_| |___/
- |
- | It seems like your application wasn't started with the -javaagent:/path-to-aspectj-weaver.jar option. Without that Kamon might
- | not work properly, if you need help on setting up the weaver go to http://kamon.io/introduction/get-started/ for more info. If
- | you are sure that you don't need the weaver (e.g. you are only using KamonStandalone) then you can disable this error message
- | by changing the kamon.metrics.disable-aspectj-weaver-missing-error setting in your configuration file.
- |
- """.stripMargin
-
- eventStream.publish(Error("MetricsExtension", classOf[MetricsExtension], weaverMissingMessage))
+ def register[T <: EntityRecorder](entity: Entity, recorder: T): EntityRegistration[T] = {
+ _trackedEntities.put(entity, recorder).map { oldRecorder ⇒
+ oldRecorder.cleanup
}
+
+ EntityRegistration(entity, recorder)
}
-}
-object Metrics extends ExtensionId[MetricsExtension] with ExtensionIdProvider {
- def lookup(): ExtensionId[_ <: actor.Extension] = Metrics
- def createExtension(system: ExtendedActorSystem): MetricsExtension = new MetricsExtension(system)
+ def unregister(entity: Entity): Unit =
+ _trackedEntities.remove(entity).map(_.cleanup)
+
+ def find(entity: Entity): Option[EntityRecorder] =
+ _trackedEntities.get(entity)
- case class MetricGroupFilter(includes: List[GlobPathFilter], excludes: List[GlobPathFilter]) {
- def accept(name: String): Boolean = includes.exists(_.accept(name)) && !excludes.exists(_.accept(name))
+ def find(name: String, category: String): Option[EntityRecorder] =
+ find(Entity(name, category))
+
+ def subscribe(filter: SubscriptionFilter, subscriber: ActorRef, permanent: Boolean): Unit =
+ _subscriptions.tell(Subscribe(filter, subscriber, permanent))
+
+ def unsubscribe(subscriber: ActorRef): Unit =
+ _subscriptions.tell(Unsubscribe(subscriber))
+
+ def buildDefaultCollectionContext: CollectionContext =
+ CollectionContext(settings.defaultCollectionContextBufferSize)
+
+ def instrumentFactory(category: String): InstrumentFactory =
+ settings.instrumentFactories.getOrElse(category, settings.defaultInstrumentFactory)
+
+ private[kamon] def collectSnapshots(collectionContext: CollectionContext): Map[Entity, EntitySnapshot] = {
+ val builder = Map.newBuilder[Entity, EntitySnapshot]
+ _trackedEntities.foreach {
+ case (identity, recorder) ⇒ builder += ((identity, recorder.collect(collectionContext)))
+ }
+
+ builder.result()
+ }
+
+ /**
+ * Metrics Extension initialization.
+ */
+ private var _system: ActorSystem = null
+ private lazy val _start = {
+ _subscriptions.point(_system.actorOf(SubscriptionsDispatcher.props(settings.tickInterval, this), "metrics"))
+ settings.pointScheduler(DefaultRefreshScheduler(_system.scheduler, _system.dispatcher))
}
- implicit class AtomicGetOrElseUpdateForTriemap[K, V](trieMap: TrieMap[K, V]) {
- def atomicGetOrElseUpdate(key: K, op: ⇒ V): V =
- trieMap.get(key) match {
- case Some(v) ⇒ v
- case None ⇒ val d = op; trieMap.putIfAbsent(key, d).getOrElse(d)
- }
+ def start(system: ActorSystem): Unit = synchronized {
+ _system = system
+ _start
+ _system = null
}
}
+
+private[kamon] object MetricsExtensionImpl {
+
+ def apply(config: Config) =
+ new MetricsExtensionImpl(config)
+}
+
diff --git a/kamon-core/src/main/scala/kamon/metric/MetricsExtensionSettings.scala b/kamon-core/src/main/scala/kamon/metric/MetricsExtensionSettings.scala
new file mode 100644
index 00000000..9881ed00
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/MetricsExtensionSettings.scala
@@ -0,0 +1,117 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+import com.typesafe.config.Config
+import kamon.metric.instrument._
+import kamon.util.GlobPathFilter
+
+import scala.concurrent.duration.FiniteDuration
+
+/**
+ * Configuration settings for the Metrics extension, as read from the `kamon.metric` configuration key.
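+ *
+ * A minimal configuration sketch showing the keys read below (values illustrative):
+ *
+ * {{{
+ * kamon.metric {
+ * tick-interval = 10 seconds
+ * default-collection-context-buffer-size = 100
+ * track-unmatched-entities = yes
+ * }
+ * }}}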
+ */
+case class MetricsExtensionSettings(
+ tickInterval: FiniteDuration,
+ defaultCollectionContextBufferSize: Int,
+ trackUnmatchedEntities: Boolean,
+ entityFilters: Map[String, EntityFilter],
+ instrumentFactories: Map[String, InstrumentFactory],
+ defaultInstrumentFactory: InstrumentFactory,
+ refreshScheduler: RefreshScheduler) {
+
+ private[kamon] def pointScheduler(targetScheduler: RefreshScheduler): Unit = refreshScheduler match {
+ case lrs: LazyRefreshScheduler ⇒ lrs.point(targetScheduler)
+ case others ⇒
+ }
+}
+
+/**
+ * Decides which entities of a category should be tracked by matching entity names against
+ * the configured include and exclude glob patterns: a name is accepted when it matches at
+ * least one include pattern and none of the exclude patterns.
+ */
+case class EntityFilter(includes: List[GlobPathFilter], excludes: List[GlobPathFilter]) {
+ def accept(name: String): Boolean =
+ includes.exists(_.accept(name)) && !excludes.exists(_.accept(name))
+}
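+// For instance (illustrative patterns), EntityFilter(List(new GlobPathFilter("user/**")),
+// List(new GlobPathFilter("user/IO-*"))) accepts "user/worker-1" but rejects "user/IO-selector".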
+
+object MetricsExtensionSettings {
+ import kamon.util.ConfigTools.Syntax
+ import scala.concurrent.duration._
+
+ def apply(config: Config): MetricsExtensionSettings = {
+ val metricConfig = config.getConfig("kamon.metric")
+
+ val tickInterval = metricConfig.getFiniteDuration("tick-interval")
+ val collectBufferSize = metricConfig.getInt("default-collection-context-buffer-size")
+ val trackUnmatchedEntities = metricConfig.getBoolean("track-unmatched-entities")
+ val entityFilters = loadFilters(metricConfig.getConfig("filters"))
+ val defaultInstrumentSettings = DefaultInstrumentSettings.fromConfig(metricConfig.getConfig("default-instrument-settings"))
+
+ val refreshScheduler = new LazyRefreshScheduler
+ val instrumentFactories = loadInstrumentFactories(metricConfig.getConfig("instrument-settings"), defaultInstrumentSettings, refreshScheduler)
+ val defaultInstrumentFactory = new InstrumentFactory(Map.empty, defaultInstrumentSettings, refreshScheduler)
+
+ MetricsExtensionSettings(tickInterval, collectBufferSize, trackUnmatchedEntities, entityFilters, instrumentFactories,
+ defaultInstrumentFactory, refreshScheduler)
+ }
+
+ /**
+ * Load all the default filters configured under the `kamon.metric.filters` configuration key. All filters are
+ * defined with the entity category as a sub-key of the `kamon.metric.filters` key and two sub-keys to it: includes
+ * and excludes with lists of string glob patterns as values. Example:
+ *
+ * {{{
+ *
+ * kamon.metric.filters {
+ * actor {
+ * includes = ["user/test-actor", "user/service/worker-*"]
+ * excludes = ["user/IO-*"]
+ * }
+ * }
+ *
+ * }}}
+ *
+ * @return a Map from category name to corresponding entity filter.
+ */
+ def loadFilters(filtersConfig: Config): Map[String, EntityFilter] = {
+ import scala.collection.JavaConverters._
+
+ filtersConfig.firstLevelKeys map { category: String ⇒
+ val includes = filtersConfig.getStringList(s"$category.includes").asScala.map(inc ⇒ new GlobPathFilter(inc)).toList
+ val excludes = filtersConfig.getStringList(s"$category.excludes").asScala.map(exc ⇒ new GlobPathFilter(exc)).toList
+
+ (category, EntityFilter(includes, excludes))
+ } toMap
+ }
+
+ /**
+ * Load any custom configuration settings defined under the `kamon.metric.instrument-settings` configuration key and
+ * create InstrumentFactories for them.
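+ *
+ * Expected layout (category and instrument names illustrative; the per-instrument block
+ * holds whatever settings InstrumentCustomSettings.fromConfig understands):
+ *
+ * {{{
+ * kamon.metric.instrument-settings {
+ * actor {
+ * processing-time {
+ * # instrument-specific overrides go here
+ * }
+ * }
+ * }
+ * }}}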
+ *
+ * @return a Map from category name to InstrumentFactory.
+ */
+ def loadInstrumentFactories(instrumentSettings: Config, defaults: DefaultInstrumentSettings, refreshScheduler: RefreshScheduler): Map[String, InstrumentFactory] = {
+ instrumentSettings.firstLevelKeys.map { category ⇒
+ val categoryConfig = instrumentSettings.getConfig(category)
+ val customSettings = categoryConfig.firstLevelKeys.map { instrumentName ⇒
+ (instrumentName, InstrumentCustomSettings.fromConfig(categoryConfig.getConfig(instrumentName)))
+ } toMap
+
+ (category, new InstrumentFactory(customSettings, defaults, refreshScheduler))
+ } toMap
+ }
+}
diff --git a/kamon-core/src/main/scala/kamon/metric/RouterMetrics.scala b/kamon-core/src/main/scala/kamon/metric/RouterMetrics.scala
deleted file mode 100644
index ddfef416..00000000
--- a/kamon-core/src/main/scala/kamon/metric/RouterMetrics.scala
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.metric
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric.instrument.{ Counter, Histogram }
-
-case class RouterMetrics(name: String) extends MetricGroupIdentity {
- val category = RouterMetrics
-}
-
-object RouterMetrics extends MetricGroupCategory {
- val name = "router"
-
- case object ProcessingTime extends MetricIdentity { val name = "processing-time" }
- case object TimeInMailbox extends MetricIdentity { val name = "time-in-mailbox" }
- case object Errors extends MetricIdentity { val name = "errors" }
-
- case class RouterMetricsRecorder(processingTime: Histogram, timeInMailbox: Histogram, errors: Counter) extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): RouterMetricSnapshot =
- RouterMetricSnapshot(processingTime.collect(context), timeInMailbox.collect(context), errors.collect(context))
-
- def cleanup: Unit = {
- processingTime.cleanup
- timeInMailbox.cleanup
- errors.cleanup
- }
- }
-
- case class RouterMetricSnapshot(processingTime: Histogram.Snapshot, timeInMailbox: Histogram.Snapshot, errors: Counter.Snapshot) extends MetricGroupSnapshot {
-
- type GroupSnapshotType = RouterMetricSnapshot
-
- def merge(that: RouterMetricSnapshot, context: CollectionContext): RouterMetricSnapshot =
- RouterMetricSnapshot(
- processingTime.merge(that.processingTime, context),
- timeInMailbox.merge(that.timeInMailbox, context),
- errors.merge(that.errors, context))
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- ProcessingTime -> processingTime,
- TimeInMailbox -> timeInMailbox,
- Errors -> errors)
- }
-
- val Factory = RouterMetricGroupFactory
-}
-
-case object RouterMetricGroupFactory extends MetricGroupFactory {
-
- import RouterMetrics._
-
- type GroupRecorder = RouterMetricsRecorder
-
- def create(config: Config, system: ActorSystem): RouterMetricsRecorder = {
- val settings = config.getConfig("precision.router")
-
- val processingTimeConfig = settings.getConfig("processing-time")
- val timeInMailboxConfig = settings.getConfig("time-in-mailbox")
-
- new RouterMetricsRecorder(
- Histogram.fromConfig(processingTimeConfig),
- Histogram.fromConfig(timeInMailboxConfig),
- Counter())
- }
-}
-
diff --git a/kamon-core/src/main/scala/kamon/metric/Subscriptions.scala b/kamon-core/src/main/scala/kamon/metric/Subscriptions.scala
deleted file mode 100644
index 1ba9f312..00000000
--- a/kamon-core/src/main/scala/kamon/metric/Subscriptions.scala
+++ /dev/null
@@ -1,173 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metric
-
-import akka.actor._
-import kamon.metric.Subscriptions._
-import kamon.util.GlobPathFilter
-import scala.concurrent.duration.{ FiniteDuration, Duration }
-import java.util.concurrent.TimeUnit
-import kamon.Kamon
-import kamon.metric.TickMetricSnapshotBuffer.FlushBuffer
-
-class Subscriptions extends Actor {
- import context.system
-
- val flushMetricsSchedule = scheduleFlushMessage()
- val collectionContext = Kamon(Metrics).buildDefaultCollectionContext
-
- var lastTick: Long = System.currentTimeMillis()
- var oneShotSubscriptions: Map[ActorRef, MetricSelectionFilter] = Map.empty
- var permanentSubscriptions: Map[ActorRef, MetricSelectionFilter] = Map.empty
-
- def receive = {
- case Subscribe(category, selection, subscriber, permanent) ⇒ subscribe(category, selection, subscriber, permanent)
- case Unsubscribe(subscriber) ⇒ unsubscribe(subscriber)
- case Terminated(subscriber) ⇒ unsubscribe(subscriber)
- case FlushMetrics ⇒ flush()
- }
-
- def subscribe(category: MetricGroupCategory, selection: String, subscriber: ActorRef, permanent: Boolean): Unit = {
- context.watch(subscriber)
- val newFilter: MetricSelectionFilter = GroupAndPatternFilter(category, new GlobPathFilter(selection))
-
- if (permanent) {
- permanentSubscriptions = permanentSubscriptions.updated(subscriber, newFilter combine {
- permanentSubscriptions.getOrElse(subscriber, MetricSelectionFilter.empty)
- })
- } else {
- oneShotSubscriptions = oneShotSubscriptions.updated(subscriber, newFilter combine {
- oneShotSubscriptions.getOrElse(subscriber, MetricSelectionFilter.empty)
- })
- }
- }
-
- def unsubscribe(subscriber: ActorRef): Unit = {
- if (permanentSubscriptions.contains(subscriber))
- permanentSubscriptions = permanentSubscriptions - subscriber
-
- if (oneShotSubscriptions.contains(subscriber))
- oneShotSubscriptions = oneShotSubscriptions - subscriber
- }
-
- def flush(): Unit = {
- val currentTick = System.currentTimeMillis()
- val snapshots = collectAll()
-
- dispatchSelectedMetrics(lastTick, currentTick, permanentSubscriptions, snapshots)
- dispatchSelectedMetrics(lastTick, currentTick, oneShotSubscriptions, snapshots)
-
- lastTick = currentTick
- oneShotSubscriptions = Map.empty
- }
-
- def collectAll(): Map[MetricGroupIdentity, MetricGroupSnapshot] = {
- val allMetrics = Kamon(Metrics).storage
- val builder = Map.newBuilder[MetricGroupIdentity, MetricGroupSnapshot]
-
- allMetrics.foreach {
- case (identity, recorder) ⇒ builder += ((identity, recorder.collect(collectionContext)))
- }
-
- builder.result()
- }
-
- def dispatchSelectedMetrics(lastTick: Long, currentTick: Long, subscriptions: Map[ActorRef, MetricSelectionFilter],
- snapshots: Map[MetricGroupIdentity, MetricGroupSnapshot]): Unit = {
-
- for ((subscriber, filter) ← subscriptions) {
- val selection = snapshots.filter(group ⇒ filter.accept(group._1))
- val tickMetrics = TickMetricSnapshot(lastTick, currentTick, selection)
-
- subscriber ! tickMetrics
- }
- }
-
- def scheduleFlushMessage(): Cancellable = {
- val config = context.system.settings.config
- val tickInterval = Duration(config.getMilliseconds("kamon.metrics.tick-interval"), TimeUnit.MILLISECONDS)
- context.system.scheduler.schedule(tickInterval, tickInterval, self, FlushMetrics)(context.dispatcher)
- }
-}
-
-object Subscriptions {
- case object FlushMetrics
- case class Unsubscribe(subscriber: ActorRef)
- case class Subscribe(category: MetricGroupCategory, selection: String, subscriber: ActorRef, permanently: Boolean = false)
- case class TickMetricSnapshot(from: Long, to: Long, metrics: Map[MetricGroupIdentity, MetricGroupSnapshot])
-
- trait MetricSelectionFilter {
- def accept(identity: MetricGroupIdentity): Boolean
- }
-
- object MetricSelectionFilter {
- val empty = new MetricSelectionFilter {
- def accept(identity: MetricGroupIdentity): Boolean = false
- }
-
- implicit class CombinableMetricSelectionFilter(msf: MetricSelectionFilter) {
- def combine(that: MetricSelectionFilter): MetricSelectionFilter = new MetricSelectionFilter {
- def accept(identity: MetricGroupIdentity): Boolean = msf.accept(identity) || that.accept(identity)
- }
- }
- }
-
- case class GroupAndPatternFilter(category: MetricGroupCategory, globFilter: GlobPathFilter) extends MetricSelectionFilter {
- def accept(identity: MetricGroupIdentity): Boolean = {
- category.equals(identity.category) && globFilter.accept(identity.name)
- }
- }
-}
-
-class TickMetricSnapshotBuffer(flushInterval: FiniteDuration, receiver: ActorRef) extends Actor {
- val flushSchedule = context.system.scheduler.schedule(flushInterval, flushInterval, self, FlushBuffer)(context.dispatcher)
- val collectionContext = Kamon(Metrics)(context.system).buildDefaultCollectionContext
-
- def receive = empty
-
- def empty: Actor.Receive = {
- case tick: TickMetricSnapshot ⇒ context become (buffering(tick))
- case FlushBuffer ⇒ // Nothing to flush.
- }
-
- def buffering(buffered: TickMetricSnapshot): Actor.Receive = {
- case TickMetricSnapshot(_, to, tickMetrics) ⇒
- val combinedMetrics = combineMaps(buffered.metrics, tickMetrics)(mergeMetricGroup)
- val combinedSnapshot = TickMetricSnapshot(buffered.from, to, combinedMetrics)
-
- context become (buffering(combinedSnapshot))
-
- case FlushBuffer ⇒
- receiver ! buffered
- context become (empty)
-
- }
-
- override def postStop(): Unit = {
- flushSchedule.cancel()
- super.postStop()
- }
-
- def mergeMetricGroup(left: MetricGroupSnapshot, right: MetricGroupSnapshot) = left.merge(right.asInstanceOf[left.GroupSnapshotType], collectionContext).asInstanceOf[MetricGroupSnapshot] // ??? //Combined(combineMaps(left.metrics, right.metrics)((l, r) ⇒ l.merge(r, collectionContext)))
-}
-
-object TickMetricSnapshotBuffer {
- case object FlushBuffer
-
- def props(flushInterval: FiniteDuration, receiver: ActorRef): Props =
- Props[TickMetricSnapshotBuffer](new TickMetricSnapshotBuffer(flushInterval, receiver))
-}
diff --git a/kamon-core/src/main/scala/kamon/metric/SubscriptionsDispatcher.scala b/kamon-core/src/main/scala/kamon/metric/SubscriptionsDispatcher.scala
new file mode 100644
index 00000000..68b545a5
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/SubscriptionsDispatcher.scala
@@ -0,0 +1,116 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+import akka.actor._
+import kamon.metric.SubscriptionsDispatcher._
+import kamon.util.{ MilliTimestamp, GlobPathFilter }
+import scala.concurrent.duration.FiniteDuration
+
+/**
+ * Manages subscriptions to metrics and dispatches snapshots to all subscribers on every tick.
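+ *
+ * A subscription sketch (subscriber and selection illustrative):
+ *
+ * {{{
+ * Kamon.metrics.subscribe("trace", "**", subscriberRef)
+ * }}}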
+ */
+private[kamon] class SubscriptionsDispatcher(interval: FiniteDuration, metricsExtension: MetricsExtensionImpl) extends Actor {
+ var lastTick = MilliTimestamp.now
+ var oneShotSubscriptions = Map.empty[ActorRef, SubscriptionFilter]
+ var permanentSubscriptions = Map.empty[ActorRef, SubscriptionFilter]
+ val tickSchedule = context.system.scheduler.schedule(interval, interval, self, Tick)(context.dispatcher)
+ val collectionContext = metricsExtension.buildDefaultCollectionContext
+
+ def receive = {
+ case Tick ⇒ processTick()
+ case Subscribe(filter, subscriber, permanently) ⇒ subscribe(filter, subscriber, permanently)
+ case Unsubscribe(subscriber) ⇒ unsubscribe(subscriber)
+ case Terminated(subscriber) ⇒ unsubscribe(subscriber)
+ }
+
+ def processTick(): Unit =
+ dispatch(metricsExtension.collectSnapshots(collectionContext))
+
+ def subscribe(filter: SubscriptionFilter, subscriber: ActorRef, permanent: Boolean): Unit = {
+ def addSubscription(storage: Map[ActorRef, SubscriptionFilter]): Map[ActorRef, SubscriptionFilter] =
+ storage.updated(subscriber, storage.getOrElse(subscriber, SubscriptionFilter.Empty).combine(filter))
+
+ context.watch(subscriber)
+
+ if (permanent)
+ permanentSubscriptions = addSubscription(permanentSubscriptions)
+ else
+ oneShotSubscriptions = addSubscription(oneShotSubscriptions)
+ }
+
+ def unsubscribe(subscriber: ActorRef): Unit = {
+ permanentSubscriptions = permanentSubscriptions - subscriber
+ oneShotSubscriptions = oneShotSubscriptions - subscriber
+ }
+
+ def dispatch(snapshots: Map[Entity, EntitySnapshot]): Unit = {
+ val currentTick = MilliTimestamp.now
+
+ dispatchSelections(lastTick, currentTick, permanentSubscriptions, snapshots)
+ dispatchSelections(lastTick, currentTick, oneShotSubscriptions, snapshots)
+
+ lastTick = currentTick
+ oneShotSubscriptions = Map.empty[ActorRef, SubscriptionFilter]
+ }
+
+ def dispatchSelections(lastTick: MilliTimestamp, currentTick: MilliTimestamp, subscriptions: Map[ActorRef, SubscriptionFilter],
+ snapshots: Map[Entity, EntitySnapshot]): Unit = {
+
+ for ((subscriber, filter) ← subscriptions) {
+ val selection = snapshots.filter(group ⇒ filter.accept(group._1))
+ val tickMetrics = TickMetricSnapshot(lastTick, currentTick, selection)
+
+ subscriber ! tickMetrics
+ }
+ }
+}
+
+object SubscriptionsDispatcher {
+ def props(interval: FiniteDuration, metricsExtension: MetricsExtensionImpl): Props =
+ Props(new SubscriptionsDispatcher(interval, metricsExtension))
+
+ case object Tick
+ case class Unsubscribe(subscriber: ActorRef)
+ case class Subscribe(filter: SubscriptionFilter, subscriber: ActorRef, permanently: Boolean = false)
+ case class TickMetricSnapshot(from: MilliTimestamp, to: MilliTimestamp, metrics: Map[Entity, EntitySnapshot])
+
+}
+
+trait SubscriptionFilter { self ⇒
+
+ def accept(entity: Entity): Boolean
+
+ final def combine(that: SubscriptionFilter): SubscriptionFilter = new SubscriptionFilter {
+ override def accept(entity: Entity): Boolean = self.accept(entity) || that.accept(entity)
+ }
+}
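+// Combined filters use OR semantics, e.g. (illustrative patterns)
+// SubscriptionFilter("actor", "user/*").combine(SubscriptionFilter("router", "**"))
+// accepts entities matched by either filter.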
+
+object SubscriptionFilter {
+ val Empty = new SubscriptionFilter {
+ def accept(entity: Entity): Boolean = false
+ }
+
+ def apply(category: String, name: String): SubscriptionFilter = new SubscriptionFilter {
+ val categoryPattern = new GlobPathFilter(category)
+ val namePattern = new GlobPathFilter(name)
+
+ def accept(entity: Entity): Boolean = {
+ categoryPattern.accept(entity.category) && namePattern.accept(entity.name)
+ }
+ }
+} \ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/metric/TickMetricSnapshotBuffer.scala b/kamon-core/src/main/scala/kamon/metric/TickMetricSnapshotBuffer.scala
new file mode 100644
index 00000000..dfc5d5f0
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/TickMetricSnapshotBuffer.scala
@@ -0,0 +1,65 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+import akka.actor.{ Props, Actor, ActorRef }
+import kamon.Kamon
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
+import kamon.metric.TickMetricSnapshotBuffer.FlushBuffer
+import kamon.metric.instrument.CollectionContext
+import kamon.util.MapMerge
+
+import scala.concurrent.duration.FiniteDuration
+
+class TickMetricSnapshotBuffer(flushInterval: FiniteDuration, receiver: ActorRef) extends Actor {
+ import MapMerge.Syntax
+
+ val flushSchedule = context.system.scheduler.schedule(flushInterval, flushInterval, self, FlushBuffer)(context.dispatcher)
+ val collectionContext: CollectionContext = Kamon.metrics.buildDefaultCollectionContext
+
+ def receive = empty
+
+ def empty: Actor.Receive = {
+ case tick: TickMetricSnapshot ⇒ context become (buffering(tick))
+ case FlushBuffer ⇒ // Nothing to flush.
+ }
+
+ def buffering(buffered: TickMetricSnapshot): Actor.Receive = {
+ case TickMetricSnapshot(_, to, tickMetrics) ⇒
+ val combinedMetrics = buffered.metrics.merge(tickMetrics, (l, r) ⇒ l.merge(r, collectionContext))
+ val combinedSnapshot = TickMetricSnapshot(buffered.from, to, combinedMetrics)
+
+ context become (buffering(combinedSnapshot))
+
+ case FlushBuffer ⇒
+ receiver ! buffered
+ context become (empty)
+
+ }
+
+ override def postStop(): Unit = {
+ flushSchedule.cancel()
+ super.postStop()
+ }
+}
+
+object TickMetricSnapshotBuffer {
+ case object FlushBuffer
+
+ def props(flushInterval: FiniteDuration, receiver: ActorRef): Props =
+ Props[TickMetricSnapshotBuffer](new TickMetricSnapshotBuffer(flushInterval, receiver))
+}
diff --git a/kamon-core/src/main/scala/kamon/metric/TraceMetrics.scala b/kamon-core/src/main/scala/kamon/metric/TraceMetrics.scala
index eaad6e0d..3da9c1d4 100644
--- a/kamon-core/src/main/scala/kamon/metric/TraceMetrics.scala
+++ b/kamon-core/src/main/scala/kamon/metric/TraceMetrics.scala
@@ -16,67 +16,29 @@
package kamon.metric
-import akka.actor.ActorSystem
-import kamon.metric.instrument.{ Histogram }
+import kamon.metric.instrument.{ Time, InstrumentFactory, Histogram }
-import scala.collection.concurrent.TrieMap
-import com.typesafe.config.Config
+class TraceMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ import TraceMetrics.segmentKey
-case class TraceMetrics(name: String) extends MetricGroupIdentity {
- val category = TraceMetrics
-}
-
-object TraceMetrics extends MetricGroupCategory {
- import Metrics.AtomicGetOrElseUpdateForTriemap
-
- val name = "trace"
-
- case object ElapsedTime extends MetricIdentity { val name = "elapsed-time" }
-
- case class TraceMetricRecorder(elapsedTime: Histogram, private val segmentRecorderFactory: () ⇒ Histogram)
- extends MetricGroupRecorder {
-
- val segments = TrieMap[MetricIdentity, Histogram]()
-
- def segmentRecorder(segmentIdentity: MetricIdentity): Histogram =
- segments.atomicGetOrElseUpdate(segmentIdentity, segmentRecorderFactory.apply())
-
- def collect(context: CollectionContext): TraceMetricsSnapshot =
- TraceMetricsSnapshot(
- elapsedTime.collect(context),
- segments.map { case (identity, recorder) ⇒ (identity, recorder.collect(context)) }.toMap)
+ /**
+ * Records the total elapsed time of completed traces, in nanoseconds.
+ */
+ val ElapsedTime = histogram("elapsed-time", unitOfMeasurement = Time.Nanoseconds)
- def cleanup: Unit = {}
- }
-
- case class TraceMetricsSnapshot(elapsedTime: Histogram.Snapshot, segments: Map[MetricIdentity, Histogram.Snapshot])
- extends MetricGroupSnapshot {
-
- type GroupSnapshotType = TraceMetricsSnapshot
-
- def merge(that: TraceMetricsSnapshot, context: CollectionContext): TraceMetricsSnapshot =
- TraceMetricsSnapshot(elapsedTime.merge(that.elapsedTime, context), combineMaps(segments, that.segments)((l, r) ⇒ l.merge(r, context)))
-
- def metrics: Map[MetricIdentity, MetricSnapshot] = segments + (ElapsedTime -> elapsedTime)
- }
-
- val Factory = TraceMetricGroupFactory
+ /**
+ * Returns the histogram that records elapsed time, in nanoseconds, for the segment identified by the given name, category and library.
+ *
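+ * For example (illustrative names):
+ *
+ * {{{
+ * val dbSegment = traceMetrics.segment("query-users", "database", "slick")
+ * }}}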
+ */
+ def segment(name: String, category: String, library: String): Histogram =
+ histogram(segmentKey(name, category, library))
}
-case object TraceMetricGroupFactory extends MetricGroupFactory {
-
- import TraceMetrics._
-
- type GroupRecorder = TraceMetricRecorder
-
- def create(config: Config, system: ActorSystem): TraceMetricRecorder = {
- val settings = config.getConfig("precision.trace")
- val elapsedTimeConfig = settings.getConfig("elapsed-time")
- val segmentConfig = settings.getConfig("segment")
+object TraceMetrics extends EntityRecorderFactory[TraceMetrics] {
+ def category: String = "trace"
+ def createRecorder(instrumentFactory: InstrumentFactory): TraceMetrics = new TraceMetrics(instrumentFactory)
- new TraceMetricRecorder(
- Histogram.fromConfig(elapsedTimeConfig, Scale.Nano),
- () ⇒ Histogram.fromConfig(segmentConfig, Scale.Nano))
- }
+ def segmentKey(name: String, category: String, library: String): HistogramKey =
+ HistogramKey(name, Time.Nanoseconds, Map("category" -> category, "library" -> library))
} \ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/metric/UserMetrics.scala b/kamon-core/src/main/scala/kamon/metric/UserMetrics.scala
index b7ac1ac5..e0818292 100644
--- a/kamon-core/src/main/scala/kamon/metric/UserMetrics.scala
+++ b/kamon-core/src/main/scala/kamon/metric/UserMetrics.scala
@@ -1,189 +1,204 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
package kamon.metric
-import akka.actor
-import akka.actor.{ ExtendedActorSystem, ExtensionIdProvider, ExtensionId }
-import kamon.Kamon
-import kamon.metric.instrument.{ Gauge, MinMaxCounter, Counter, Histogram }
+import kamon.metric.instrument.Gauge.CurrentValueCollector
+import kamon.metric.instrument.Histogram.DynamicRange
+import kamon.metric.instrument._
import scala.concurrent.duration.FiniteDuration
-class UserMetricsExtension(system: ExtendedActorSystem) extends Kamon.Extension {
- import Metrics.AtomicGetOrElseUpdateForTriemap
- import UserMetrics._
+trait UserMetricsExtension {
+ def histogram(name: String): Histogram
+ def histogram(name: String, dynamicRange: DynamicRange): Histogram
+ def histogram(name: String, unitOfMeasurement: UnitOfMeasurement): Histogram
+ def histogram(name: String, dynamicRange: DynamicRange, unitOfMeasurement: UnitOfMeasurement): Histogram
+ def histogram(key: HistogramKey): Histogram
+ def histogram(key: HistogramKey, dynamicRange: DynamicRange): Histogram
+ def removeHistogram(name: String): Unit
+ def removeHistogram(key: HistogramKey): Unit
+
+ def minMaxCounter(name: String): MinMaxCounter
+ def minMaxCounter(name: String, dynamicRange: DynamicRange): MinMaxCounter
+ def minMaxCounter(name: String, refreshInterval: FiniteDuration): MinMaxCounter
+ def minMaxCounter(name: String, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter
+ def minMaxCounter(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration): MinMaxCounter
+ def minMaxCounter(name: String, dynamicRange: DynamicRange, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter
+ def minMaxCounter(name: String, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter
+ def minMaxCounter(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter
+ def minMaxCounter(key: MinMaxCounterKey): MinMaxCounter
+ def minMaxCounter(key: MinMaxCounterKey, dynamicRange: DynamicRange): MinMaxCounter
+ def minMaxCounter(key: MinMaxCounterKey, refreshInterval: FiniteDuration): MinMaxCounter
+ def minMaxCounter(key: MinMaxCounterKey, dynamicRange: DynamicRange, refreshInterval: FiniteDuration): MinMaxCounter
+ def removeMinMaxCounter(name: String): Unit
+ def removeMinMaxCounter(key: MinMaxCounterKey): Unit
+
+ def gauge(name: String, valueCollector: CurrentValueCollector): Gauge
+ def gauge(name: String, dynamicRange: DynamicRange, valueCollector: CurrentValueCollector): Gauge
+ def gauge(name: String, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge
+ def gauge(name: String, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge
+ def gauge(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge
+ def gauge(name: String, dynamicRange: DynamicRange, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge
+ def gauge(name: String, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge
+ def gauge(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge
+ def gauge(key: GaugeKey, valueCollector: CurrentValueCollector): Gauge
+ def gauge(key: GaugeKey, dynamicRange: DynamicRange, valueCollector: CurrentValueCollector): Gauge
+ def gauge(key: GaugeKey, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge
+ def gauge(key: GaugeKey, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge
+ def removeGauge(name: String): Unit
+ def removeGauge(key: GaugeKey): Unit
+
+ def counter(name: String): Counter
+ def counter(key: CounterKey): Counter
+ def removeCounter(name: String): Unit
+ def removeCounter(key: CounterKey): Unit
- lazy val metricsExtension = Kamon(Metrics)(system)
- val precisionConfig = system.settings.config.getConfig("kamon.metrics.precision")
+}
- val defaultHistogramPrecisionConfig = precisionConfig.getConfig("default-histogram-precision")
- val defaultMinMaxCounterPrecisionConfig = precisionConfig.getConfig("default-min-max-counter-precision")
- val defaultGaugePrecisionConfig = precisionConfig.getConfig("default-gauge-precision")
+private[kamon] class UserMetricsExtensionImpl(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) with UserMetricsExtension {
+ override def histogram(name: String): Histogram =
+ super.histogram(name)
- def registerHistogram(name: String, precision: Histogram.Precision, highestTrackableValue: Long): Histogram = {
- metricsExtension.storage.atomicGetOrElseUpdate(UserHistogram(name), {
- UserHistogramRecorder(Histogram(highestTrackableValue, precision, Scale.Unit))
- }).asInstanceOf[UserHistogramRecorder].histogram
- }
+ override def histogram(name: String, dynamicRange: DynamicRange): Histogram =
+ super.histogram(name, dynamicRange)
- def registerHistogram(name: String): Histogram = {
- metricsExtension.storage.atomicGetOrElseUpdate(UserHistogram(name), {
- UserHistogramRecorder(Histogram.fromConfig(defaultHistogramPrecisionConfig))
- }).asInstanceOf[UserHistogramRecorder].histogram
- }
+ override def histogram(name: String, unitOfMeasurement: UnitOfMeasurement): Histogram =
+ super.histogram(name, unitOfMeasurement)
- def registerCounter(name: String): Counter = {
- metricsExtension.storage.atomicGetOrElseUpdate(UserCounter(name), {
- UserCounterRecorder(Counter())
- }).asInstanceOf[UserCounterRecorder].counter
- }
+ override def histogram(name: String, dynamicRange: DynamicRange, unitOfMeasurement: UnitOfMeasurement): Histogram =
+ super.histogram(name, dynamicRange, unitOfMeasurement)
- def registerMinMaxCounter(name: String, precision: Histogram.Precision, highestTrackableValue: Long,
- refreshInterval: FiniteDuration): MinMaxCounter = {
- metricsExtension.storage.atomicGetOrElseUpdate(UserMinMaxCounter(name), {
- UserMinMaxCounterRecorder(MinMaxCounter(highestTrackableValue, precision, Scale.Unit, refreshInterval, system))
- }).asInstanceOf[UserMinMaxCounterRecorder].minMaxCounter
- }
+ override def histogram(key: HistogramKey): Histogram =
+ super.histogram(key)
- def registerMinMaxCounter(name: String): MinMaxCounter = {
- metricsExtension.storage.atomicGetOrElseUpdate(UserMinMaxCounter(name), {
- UserMinMaxCounterRecorder(MinMaxCounter.fromConfig(defaultMinMaxCounterPrecisionConfig, system))
- }).asInstanceOf[UserMinMaxCounterRecorder].minMaxCounter
- }
+ override def histogram(key: HistogramKey, dynamicRange: DynamicRange): Histogram =
+ super.histogram(key, dynamicRange)
- def registerGauge(name: String)(currentValueCollector: Gauge.CurrentValueCollector): Gauge = {
- metricsExtension.storage.atomicGetOrElseUpdate(UserGauge(name), {
- UserGaugeRecorder(Gauge.fromConfig(defaultGaugePrecisionConfig, system)(currentValueCollector))
- }).asInstanceOf[UserGaugeRecorder].gauge
- }
+ override def removeHistogram(name: String): Unit =
+ super.removeHistogram(name)
- def registerGauge(name: String, precision: Histogram.Precision, highestTrackableValue: Long,
- refreshInterval: FiniteDuration)(currentValueCollector: Gauge.CurrentValueCollector): Gauge = {
- metricsExtension.storage.atomicGetOrElseUpdate(UserGauge(name), {
- UserGaugeRecorder(Gauge(precision, highestTrackableValue, Scale.Unit, refreshInterval, system)(currentValueCollector))
- }).asInstanceOf[UserGaugeRecorder].gauge
- }
+ override def removeHistogram(key: HistogramKey): Unit =
+ super.removeHistogram(key)
- def removeHistogram(name: String): Unit =
- metricsExtension.unregister(UserHistogram(name))
+ override def minMaxCounter(name: String): MinMaxCounter =
+ super.minMaxCounter(name)
- def removeCounter(name: String): Unit =
- metricsExtension.unregister(UserCounter(name))
+ override def minMaxCounter(name: String, dynamicRange: DynamicRange): MinMaxCounter =
+ super.minMaxCounter(name, dynamicRange)
- def removeMinMaxCounter(name: String): Unit =
- metricsExtension.unregister(UserMinMaxCounter(name))
+ override def minMaxCounter(name: String, refreshInterval: FiniteDuration): MinMaxCounter =
+ super.minMaxCounter(name, refreshInterval)
- def removeGauge(name: String): Unit =
- metricsExtension.unregister(UserGauge(name))
-}
+ override def minMaxCounter(name: String, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter =
+ super.minMaxCounter(name, unitOfMeasurement)
-object UserMetrics extends ExtensionId[UserMetricsExtension] with ExtensionIdProvider {
- def lookup(): ExtensionId[_ <: actor.Extension] = Metrics
+ override def minMaxCounter(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration): MinMaxCounter =
+ super.minMaxCounter(name, dynamicRange, refreshInterval)
- def createExtension(system: ExtendedActorSystem): UserMetricsExtension = new UserMetricsExtension(system)
+ override def minMaxCounter(name: String, dynamicRange: DynamicRange, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter =
+ super.minMaxCounter(name, dynamicRange, unitOfMeasurement)
- sealed trait UserMetricGroup
- //
- // Histograms
- //
+ override def minMaxCounter(name: String, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter =
+ super.minMaxCounter(name, refreshInterval, unitOfMeasurement)
- case class UserHistogram(name: String) extends MetricGroupIdentity with UserMetricGroup {
- val category = UserHistograms
- }
+ override def minMaxCounter(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter =
+ super.minMaxCounter(name, dynamicRange, refreshInterval, unitOfMeasurement)
- case class UserHistogramRecorder(histogram: Histogram) extends MetricGroupRecorder {
- def collect(context: CollectionContext): MetricGroupSnapshot =
- UserHistogramSnapshot(histogram.collect(context))
+ override def minMaxCounter(key: MinMaxCounterKey): MinMaxCounter =
+ super.minMaxCounter(key)
- def cleanup: Unit = histogram.cleanup
- }
+ override def minMaxCounter(key: MinMaxCounterKey, dynamicRange: DynamicRange): MinMaxCounter =
+ super.minMaxCounter(key, dynamicRange)
- case class UserHistogramSnapshot(histogramSnapshot: Histogram.Snapshot) extends MetricGroupSnapshot {
- type GroupSnapshotType = UserHistogramSnapshot
+ override def minMaxCounter(key: MinMaxCounterKey, refreshInterval: FiniteDuration): MinMaxCounter =
+ super.minMaxCounter(key, refreshInterval)
- def merge(that: UserHistogramSnapshot, context: CollectionContext): UserHistogramSnapshot =
- UserHistogramSnapshot(that.histogramSnapshot.merge(histogramSnapshot, context))
+ override def minMaxCounter(key: MinMaxCounterKey, dynamicRange: DynamicRange, refreshInterval: FiniteDuration): MinMaxCounter =
+ super.minMaxCounter(key, dynamicRange, refreshInterval)
- def metrics: Map[MetricIdentity, MetricSnapshot] = Map((RecordedValues, histogramSnapshot))
- }
+ override def removeMinMaxCounter(name: String): Unit =
+ super.removeMinMaxCounter(name)
- //
- // Counters
- //
+ override def removeMinMaxCounter(key: MinMaxCounterKey): Unit =
+ super.removeMinMaxCounter(key)
- case class UserCounter(name: String) extends MetricGroupIdentity with UserMetricGroup {
- val category = UserCounters
- }
+ override def gauge(name: String, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(name, valueCollector)
- case class UserCounterRecorder(counter: Counter) extends MetricGroupRecorder {
- def collect(context: CollectionContext): MetricGroupSnapshot =
- UserCounterSnapshot(counter.collect(context))
+ override def gauge(name: String, dynamicRange: DynamicRange, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(name, dynamicRange, valueCollector)
- def cleanup: Unit = counter.cleanup
- }
+ override def gauge(name: String, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(name, refreshInterval, valueCollector)
- case class UserCounterSnapshot(counterSnapshot: Counter.Snapshot) extends MetricGroupSnapshot {
- type GroupSnapshotType = UserCounterSnapshot
+ override def gauge(name: String, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(name, unitOfMeasurement, valueCollector)
- def merge(that: UserCounterSnapshot, context: CollectionContext): UserCounterSnapshot =
- UserCounterSnapshot(that.counterSnapshot.merge(counterSnapshot, context))
+ override def gauge(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(name, dynamicRange, refreshInterval, valueCollector)
- def metrics: Map[MetricIdentity, MetricSnapshot] = Map((Count, counterSnapshot))
- }
+ override def gauge(name: String, dynamicRange: DynamicRange, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(name, dynamicRange, unitOfMeasurement, valueCollector)
- //
- // MinMaxCounters
- //
+ override def gauge(name: String, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(name, refreshInterval, unitOfMeasurement, valueCollector)
- case class UserMinMaxCounter(name: String) extends MetricGroupIdentity with UserMetricGroup {
- val category = UserMinMaxCounters
- }
+ override def gauge(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(name, dynamicRange, refreshInterval, unitOfMeasurement, valueCollector)
- case class UserMinMaxCounterRecorder(minMaxCounter: MinMaxCounter) extends MetricGroupRecorder {
- def collect(context: CollectionContext): MetricGroupSnapshot =
- UserMinMaxCounterSnapshot(minMaxCounter.collect(context))
+ override def gauge(key: GaugeKey, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(key, valueCollector)
- def cleanup: Unit = minMaxCounter.cleanup
- }
+ override def gauge(key: GaugeKey, dynamicRange: DynamicRange, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(key, dynamicRange, valueCollector)
- case class UserMinMaxCounterSnapshot(minMaxCounterSnapshot: Histogram.Snapshot) extends MetricGroupSnapshot {
- type GroupSnapshotType = UserMinMaxCounterSnapshot
+ override def gauge(key: GaugeKey, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(key, refreshInterval, valueCollector)
- def merge(that: UserMinMaxCounterSnapshot, context: CollectionContext): UserMinMaxCounterSnapshot =
- UserMinMaxCounterSnapshot(that.minMaxCounterSnapshot.merge(minMaxCounterSnapshot, context))
+ override def gauge(key: GaugeKey, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(key, dynamicRange, refreshInterval, valueCollector)
- def metrics: Map[MetricIdentity, MetricSnapshot] = Map((RecordedValues, minMaxCounterSnapshot))
- }
+ override def removeGauge(name: String): Unit =
+ super.removeGauge(name)
- //
- // Gauges
- //
+ override def removeGauge(key: GaugeKey): Unit =
+ super.removeGauge(key)
- case class UserGauge(name: String) extends MetricGroupIdentity with UserMetricGroup {
- val category = UserGauges
- }
+ override def counter(name: String): Counter =
+ super.counter(name)
- case class UserGaugeRecorder(gauge: Gauge) extends MetricGroupRecorder {
- def collect(context: CollectionContext): MetricGroupSnapshot =
- UserGaugeSnapshot(gauge.collect(context))
+ override def counter(key: CounterKey): Counter =
+ super.counter(key)
- def cleanup: Unit = gauge.cleanup
- }
+ override def removeCounter(name: String): Unit =
+ super.removeCounter(name)
- case class UserGaugeSnapshot(gaugeSnapshot: Histogram.Snapshot) extends MetricGroupSnapshot {
- type GroupSnapshotType = UserGaugeSnapshot
-
- def merge(that: UserGaugeSnapshot, context: CollectionContext): UserGaugeSnapshot =
- UserGaugeSnapshot(that.gaugeSnapshot.merge(gaugeSnapshot, context))
-
- def metrics: Map[MetricIdentity, MetricSnapshot] = Map((RecordedValues, gaugeSnapshot))
- }
+ override def removeCounter(key: CounterKey): Unit =
+ super.removeCounter(key)
+}
- case object UserHistograms extends MetricGroupCategory { val name: String = "histogram" }
- case object UserCounters extends MetricGroupCategory { val name: String = "counter" }
- case object UserMinMaxCounters extends MetricGroupCategory { val name: String = "min-max-counter" }
- case object UserGauges extends MetricGroupCategory { val name: String = "gauge" }
+private[kamon] object UserMetricsExtensionImpl {
+ val UserMetricEntity = Entity("user-metric", "user-metric")
- case object RecordedValues extends MetricIdentity { val name: String = "values" }
- case object Count extends MetricIdentity { val name: String = "count" }
+ def apply(metricsExtension: MetricsExtension): UserMetricsExtensionImpl = {
+ val instrumentFactory = metricsExtension.instrumentFactory(UserMetricEntity.category)
+ val userMetricsExtension = new UserMetricsExtensionImpl(instrumentFactory)
-}
+ metricsExtension.register(UserMetricEntity, userMetricsExtension).recorder
+ }
+} \ No newline at end of file
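For reference, a minimal usage sketch of the rewritten user-metrics API, assuming a UserMetricsExtension instance is obtained through the surrounding Kamon wiring (not shown in this diff):

import kamon.metric.instrument.Time

val userMetrics: UserMetricsExtension = ??? // assumed wiring, provided by the Kamon metrics module

val latency = userMetrics.histogram("request-latency", Time.Nanoseconds)
latency.record(42000000L) // recorded in nanoseconds, per the unit of measurement

val queue = userMetrics.minMaxCounter("queue-size")
queue.increment()
queue.decrement()

userMetrics.removeHistogram("request-latency")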
diff --git a/kamon-core/src/main/scala/kamon/instrumentation/hdrhistogram/AtomicHistogramFieldsAccessor.scala b/kamon-core/src/main/scala/kamon/metric/instrument/AtomicHistogramFieldsAccessor.scala
index e79090a8..e79090a8 100644
--- a/kamon-core/src/main/scala/kamon/instrumentation/hdrhistogram/AtomicHistogramFieldsAccessor.scala
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/AtomicHistogramFieldsAccessor.scala
diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/Counter.scala b/kamon-core/src/main/scala/kamon/metric/instrument/Counter.scala
index 0f29ba6f..c1b69cbe 100644
--- a/kamon-core/src/main/scala/kamon/metric/instrument/Counter.scala
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/Counter.scala
@@ -17,9 +17,8 @@
package kamon.metric.instrument
import kamon.jsr166.LongAdder
-import kamon.metric.{ CollectionContext, MetricSnapshot, MetricRecorder }
-trait Counter extends MetricRecorder {
+trait Counter extends Instrument {
type SnapshotType = Counter.Snapshot
def increment(): Unit
@@ -29,12 +28,11 @@ trait Counter extends MetricRecorder {
object Counter {
def apply(): Counter = new LongAdderCounter
+ def create(): Counter = apply()
- trait Snapshot extends MetricSnapshot {
- type SnapshotType = Counter.Snapshot
-
+ trait Snapshot extends InstrumentSnapshot {
def count: Long
- def merge(that: Counter.Snapshot, context: CollectionContext): Counter.Snapshot
+ def merge(that: InstrumentSnapshot, context: CollectionContext): Counter.Snapshot
}
}
@@ -55,5 +53,8 @@ class LongAdderCounter extends Counter {
}
case class CounterSnapshot(count: Long) extends Counter.Snapshot {
- def merge(that: Counter.Snapshot, context: CollectionContext): Counter.Snapshot = CounterSnapshot(count + that.count)
+ def merge(that: InstrumentSnapshot, context: CollectionContext): Counter.Snapshot = that match {
+ case CounterSnapshot(thatCount) ⇒ CounterSnapshot(count + thatCount)
+ case other ⇒ sys.error(s"Cannot merge a CounterSnapshot with the incompatible [${other.getClass.getName}] type.")
+ }
} \ No newline at end of file
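The merge contract now accepts any InstrumentSnapshot and fails fast on incompatible types. A minimal sketch, assuming CollectionContext only requires the LongBuffer member that the histogram merge uses elsewhere in this diff:

import java.nio.LongBuffer

// Counters never touch the buffer while merging; an empty one is enough here.
val context = new CollectionContext { val buffer: LongBuffer = LongBuffer.allocate(0) }

val merged = CounterSnapshot(3).merge(CounterSnapshot(4), context)
// merged.count == 7; merging with a non-counter snapshot fails via sys.error.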
diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/Gauge.scala b/kamon-core/src/main/scala/kamon/metric/instrument/Gauge.scala
index 0c1815c3..80214510 100644
--- a/kamon-core/src/main/scala/kamon/metric/instrument/Gauge.scala
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/Gauge.scala
@@ -1,70 +1,102 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
package kamon.metric.instrument
-import java.util.concurrent.TimeUnit
-import java.util.concurrent.atomic.AtomicReference
+import java.util.concurrent.atomic.{ AtomicLong, AtomicLongFieldUpdater, AtomicReference }
-import akka.actor.{ Cancellable, ActorSystem }
-import com.typesafe.config.Config
-import kamon.metric.{ CollectionContext, Scale, MetricRecorder }
+import akka.actor.Cancellable
+import kamon.metric.instrument.Gauge.CurrentValueCollector
+import kamon.metric.instrument.Histogram.DynamicRange
import scala.concurrent.duration.FiniteDuration
-trait Gauge extends MetricRecorder {
+trait Gauge extends Instrument {
type SnapshotType = Histogram.Snapshot
- def record(value: Long)
- def record(value: Long, count: Long)
+ def record(value: Long): Unit
+ def record(value: Long, count: Long): Unit
+ def refreshValue(): Unit
}
object Gauge {
- trait CurrentValueCollector {
- def currentValue: Long
- }
-
- def apply(precision: Histogram.Precision, highestTrackableValue: Long, scale: Scale, refreshInterval: FiniteDuration,
- system: ActorSystem)(currentValueCollector: CurrentValueCollector): Gauge = {
-
- val underlyingHistogram = Histogram(highestTrackableValue, precision, scale)
- val gauge = new HistogramBackedGauge(underlyingHistogram, currentValueCollector)
-
- val refreshValuesSchedule = system.scheduler.schedule(refreshInterval, refreshInterval) {
+ def apply(dynamicRange: DynamicRange, refreshInterval: FiniteDuration, scheduler: RefreshScheduler, valueCollector: CurrentValueCollector): Gauge = {
+ val underlyingHistogram = Histogram(dynamicRange)
+ val gauge = new HistogramBackedGauge(underlyingHistogram, valueCollector)
+ val refreshValuesSchedule = scheduler.schedule(refreshInterval, () ⇒ {
gauge.refreshValue()
- }(system.dispatcher) // TODO: Move this to Kamon dispatchers
+ })
- gauge.refreshValuesSchedule.set(refreshValuesSchedule)
+ gauge.automaticValueCollectorSchedule.set(refreshValuesSchedule)
gauge
}
- def fromDefaultConfig(system: ActorSystem)(currentValueCollectorFunction: () ⇒ Long): Gauge =
- fromDefaultConfig(system, functionZeroAsCurrentValueCollector(currentValueCollectorFunction))
+ def create(dynamicRange: DynamicRange, refreshInterval: FiniteDuration, scheduler: RefreshScheduler, valueCollector: CurrentValueCollector): Gauge =
+ apply(dynamicRange, refreshInterval, scheduler, valueCollector)
- def fromDefaultConfig(system: ActorSystem, currentValueCollector: CurrentValueCollector): Gauge = {
- val config = system.settings.config.getConfig("kamon.metrics.precision.default-gauge-precision")
- fromConfig(config, system)(currentValueCollector)
+ trait CurrentValueCollector {
+ def currentValue: Long
}
- def fromConfig(config: Config, system: ActorSystem, scale: Scale)(currentValueCollector: CurrentValueCollector): Gauge = {
- import scala.concurrent.duration._
+ implicit def functionZeroAsCurrentValueCollector(f: () ⇒ Long): CurrentValueCollector = new CurrentValueCollector {
+ def currentValue: Long = f.apply()
+ }
+}
- val highest = config.getLong("highest-trackable-value")
- val significantDigits = config.getInt("significant-value-digits")
- val refreshInterval = config.getMilliseconds("refresh-interval").toInt
+/**
+ * Helper for cases in which a gauge shouldn't store the current value of an observed value, but rather the difference
+ * between the current observed value and the previously observed value. It should only be used when the observed value
+ * is monotonically increasing or staying steady, and never able to decrease.
+ *
+ * Note: The first time a value is collected this wrapper will always return zero; afterwards, the difference between
+ * the current value and the last observed value will be returned.
+ */
+class DifferentialValueCollector(wrappedValueCollector: CurrentValueCollector) extends CurrentValueCollector {
+ @volatile private var _readAtLeastOnce = false
+ private val _lastObservedValue = new AtomicLong(0)
+
+ def currentValue: Long = {
+ if (_readAtLeastOnce) {
+ val wrappedCurrent = wrappedValueCollector.currentValue
+ val diff = wrappedCurrent - _lastObservedValue.getAndSet(wrappedCurrent)
+
+ if (diff >= 0) diff else 0L
+
+ } else {
+ _lastObservedValue.set(wrappedValueCollector.currentValue)
+ _readAtLeastOnce = true
+ 0L
+ }
- Gauge(Histogram.Precision(significantDigits), highest, scale, refreshInterval.millis, system)(currentValueCollector)
}
+}
- def fromConfig(config: Config, system: ActorSystem)(currentValueCollector: CurrentValueCollector): Gauge = {
- fromConfig(config, system, Scale.Unit)(currentValueCollector)
- }
+object DifferentialValueCollector {
+ def apply(wrappedValueCollector: CurrentValueCollector): CurrentValueCollector =
+ new DifferentialValueCollector(wrappedValueCollector)
- implicit def functionZeroAsCurrentValueCollector(f: () ⇒ Long): CurrentValueCollector = new CurrentValueCollector {
- def currentValue: Long = f.apply()
- }
+ def apply(wrappedValueCollector: ⇒ Long): CurrentValueCollector =
+ new DifferentialValueCollector(new CurrentValueCollector {
+ def currentValue: Long = wrappedValueCollector
+ })
}
class HistogramBackedGauge(underlyingHistogram: Histogram, currentValueCollector: Gauge.CurrentValueCollector) extends Gauge {
- val refreshValuesSchedule = new AtomicReference[Cancellable]()
+ private[kamon] val automaticValueCollectorSchedule = new AtomicReference[Cancellable]()
def record(value: Long): Unit = underlyingHistogram.record(value)
@@ -73,10 +105,12 @@ class HistogramBackedGauge(underlyingHistogram: Histogram, currentValueCollector
def collect(context: CollectionContext): Histogram.Snapshot = underlyingHistogram.collect(context)
def cleanup: Unit = {
- if (refreshValuesSchedule.get() != null)
- refreshValuesSchedule.get().cancel()
+ if (automaticValueCollectorSchedule.get() != null)
+ automaticValueCollectorSchedule.get().cancel()
}
- def refreshValue(): Unit = underlyingHistogram.record(currentValueCollector.currentValue)
+ def refreshValue(): Unit =
+ underlyingHistogram.record(currentValueCollector.currentValue)
+
}
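A short sketch of DifferentialValueCollector in use; the byte counter below is hypothetical:

object DifferentialValueCollectorSketch {
  @volatile var totalBytesRead: Long = 0L // hypothetical, monotonically increasing source

  val collector = DifferentialValueCollector(totalBytesRead) // by-name overload

  def sample(): Unit = {
    collector.currentValue // the first read is always 0, establishing the baseline
    totalBytesRead += 512
    collector.currentValue // 512, the delta since the previous read
  }
}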
diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/Histogram.scala b/kamon-core/src/main/scala/kamon/metric/instrument/Histogram.scala
index bed75fc8..5c4c7f71 100644
--- a/kamon-core/src/main/scala/kamon/metric/instrument/Histogram.scala
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/Histogram.scala
@@ -17,12 +17,11 @@
package kamon.metric.instrument
import java.nio.LongBuffer
-import com.typesafe.config.Config
import org.HdrHistogram.AtomicHistogramFieldsAccessor
+import kamon.metric.instrument.Histogram.{ Snapshot, DynamicRange }
import org.HdrHistogram.AtomicHistogram
-import kamon.metric._
-trait Histogram extends MetricRecorder {
+trait Histogram extends Instrument {
type SnapshotType = Histogram.Snapshot
def record(value: Long)
@@ -31,30 +30,40 @@ trait Histogram extends MetricRecorder {
object Histogram {
- def apply(highestTrackableValue: Long, precision: Precision, scale: Scale): Histogram =
- new HdrHistogram(1L, highestTrackableValue, precision.significantDigits, scale)
-
- def fromConfig(config: Config): Histogram = {
- fromConfig(config, Scale.Unit)
- }
-
- def fromConfig(config: Config, scale: Scale): Histogram = {
- val highest = config.getLong("highest-trackable-value")
- val significantDigits = config.getInt("significant-value-digits")
-
- new HdrHistogram(1L, highest, significantDigits, scale)
- }
-
- object HighestTrackableValue {
- val OneHourInNanoseconds = 3600L * 1000L * 1000L * 1000L
- }
-
- case class Precision(significantDigits: Int)
- object Precision {
- val Low = Precision(1)
- val Normal = Precision(2)
- val Fine = Precision(3)
- }
+ /**
+ * Scala API:
+ *
+ * Create a new High Dynamic Range Histogram ([[kamon.metric.instrument.HdrHistogram]]) using the given
+ * [[kamon.metric.instrument.Histogram.DynamicRange]].
+ */
+ def apply(dynamicRange: DynamicRange): Histogram = new HdrHistogram(dynamicRange)
+
+ /**
+ * Java API:
+ *
+ * Create a new High Dynamic Range Histogram ([[kamon.metric.instrument.HdrHistogram]]) using the given
+ * [[kamon.metric.instrument.Histogram.DynamicRange]].
+ */
+ def create(dynamicRange: DynamicRange): Histogram = apply(dynamicRange)
+
+ /**
+ * DynamicRange is a configuration object used to supply range and precision configuration to a
+ * [[kamon.metric.instrument.HdrHistogram]]. See the [[http://hdrhistogram.github.io/HdrHistogram/ HdrHistogram website]]
+ * for more details on how it works and the effects of these configuration values.
+ *
+ * @param lowestDiscernibleValue
+ * The lowest value that can be discerned (distinguished from 0) by the histogram. Must be a positive integer that
+ * is >= 1. May be internally rounded down to the nearest power of 2.
+ *
+ * @param highestTrackableValue
+ * The highest value to be tracked by the histogram. Must be a positive integer that is >= (2 * lowestDiscernibleValue).
+ * Must not be larger than (Long.MAX_VALUE/2).
+ *
+ * @param precision
+ * The number of significant decimal digits to which the histogram will maintain value resolution and separation.
+ * Must be an integer between 1 and 3.
+ */
+ case class DynamicRange(lowestDiscernibleValue: Long, highestTrackableValue: Long, precision: Int)
trait Record {
def level: Long
@@ -67,29 +76,28 @@ object Histogram {
var rawCompactRecord: Long = 0L
}
- trait Snapshot extends MetricSnapshot {
- type SnapshotType = Histogram.Snapshot
+ trait Snapshot extends InstrumentSnapshot {
def isEmpty: Boolean = numberOfMeasurements == 0
- def scale: Scale
def numberOfMeasurements: Long
def min: Long
def max: Long
def sum: Long
def percentile(percentile: Double): Long
def recordsIterator: Iterator[Record]
+ def merge(that: InstrumentSnapshot, context: CollectionContext): InstrumentSnapshot
def merge(that: Histogram.Snapshot, context: CollectionContext): Histogram.Snapshot
}
object Snapshot {
- def empty(targetScale: Scale) = new Snapshot {
+ val empty = new Snapshot {
override def min: Long = 0L
override def max: Long = 0L
override def sum: Long = 0L
override def percentile(percentile: Double): Long = 0L
override def recordsIterator: Iterator[Record] = Iterator.empty
- override def merge(that: Snapshot, context: CollectionContext): Snapshot = that
- override def scale: Scale = targetScale
+ override def merge(that: InstrumentSnapshot, context: CollectionContext): InstrumentSnapshot = that
+ override def merge(that: Histogram.Snapshot, context: CollectionContext): Histogram.Snapshot = that
override def numberOfMeasurements: Long = 0L
}
}
@@ -100,10 +108,8 @@ object Histogram {
* The collect(..) operation extracts all the recorded values from the histogram and resets the counts, but still
* leaves it in a consistent state even in the case of concurrent modification while the snapshot is being taken.
*/
-class HdrHistogram(lowestTrackableValue: Long, highestTrackableValue: Long, significantValueDigits: Int, scale: Scale = Scale.Unit)
- extends AtomicHistogram(lowestTrackableValue, highestTrackableValue, significantValueDigits)
- with Histogram with AtomicHistogramFieldsAccessor {
-
+class HdrHistogram(dynamicRange: DynamicRange) extends AtomicHistogram(dynamicRange.lowestDiscernibleValue,
+ dynamicRange.highestTrackableValue, dynamicRange.precision) with Histogram with AtomicHistogramFieldsAccessor {
import AtomicHistogramFieldsAccessor.totalCountUpdater
def record(value: Long): Unit = recordValue(value)
@@ -119,7 +125,7 @@ class HdrHistogram(lowestTrackableValue: Long, highestTrackableValue: Long, sign
val measurementsArray = Array.ofDim[Long](buffer.limit())
buffer.get(measurementsArray, 0, measurementsArray.length)
- new CompactHdrSnapshot(scale, nrOfMeasurements, measurementsArray, unitMagnitude(), subBucketHalfCount(), subBucketHalfCountMagnitude())
+ new CompactHdrSnapshot(nrOfMeasurements, measurementsArray, unitMagnitude(), subBucketHalfCount(), subBucketHalfCountMagnitude())
}
def getCounts = countsArray().length()
@@ -160,7 +166,7 @@ class HdrHistogram(lowestTrackableValue: Long, highestTrackableValue: Long, sign
}
-case class CompactHdrSnapshot(val scale: Scale, val numberOfMeasurements: Long, compactRecords: Array[Long], unitMagnitude: Int,
+case class CompactHdrSnapshot(val numberOfMeasurements: Long, compactRecords: Array[Long], unitMagnitude: Int,
subBucketHalfCount: Int, subBucketHalfCountMagnitude: Int) extends Histogram.Snapshot {
def min: Long = if (compactRecords.length == 0) 0 else levelFromCompactRecord(compactRecords(0))
@@ -182,53 +188,61 @@ case class CompactHdrSnapshot(val scale: Scale, val numberOfMeasurements: Long,
percentileLevel
}
- def merge(that: Histogram.Snapshot, context: CollectionContext): Histogram.Snapshot = {
- if (that.isEmpty) this else if (this.isEmpty) that else {
- import context.buffer
- buffer.clear()
+ def merge(that: Histogram.Snapshot, context: CollectionContext): Snapshot =
+ merge(that.asInstanceOf[InstrumentSnapshot], context)
- val selfIterator = recordsIterator
- val thatIterator = that.recordsIterator
- var thatCurrentRecord: Histogram.Record = null
- var mergedNumberOfMeasurements = 0L
+ def merge(that: InstrumentSnapshot, context: CollectionContext): Histogram.Snapshot = that match {
+ case thatSnapshot: CompactHdrSnapshot ⇒
+ if (thatSnapshot.isEmpty) this else if (this.isEmpty) thatSnapshot else {
+ import context.buffer
+ buffer.clear()
- def nextOrNull(iterator: Iterator[Histogram.Record]): Histogram.Record = if (iterator.hasNext) iterator.next() else null
- def addToBuffer(compactRecord: Long): Unit = {
- mergedNumberOfMeasurements += countFromCompactRecord(compactRecord)
- buffer.put(compactRecord)
- }
+ val selfIterator = recordsIterator
+ val thatIterator = thatSnapshot.recordsIterator
+ var thatCurrentRecord: Histogram.Record = null
+ var mergedNumberOfMeasurements = 0L
- while (selfIterator.hasNext) {
- val selfCurrentRecord = selfIterator.next()
+ def nextOrNull(iterator: Iterator[Histogram.Record]): Histogram.Record = if (iterator.hasNext) iterator.next() else null
+ def addToBuffer(compactRecord: Long): Unit = {
+ mergedNumberOfMeasurements += countFromCompactRecord(compactRecord)
+ buffer.put(compactRecord)
+ }
- // Advance that to no further than the level of selfCurrentRecord
- thatCurrentRecord = if (thatCurrentRecord == null) nextOrNull(thatIterator) else thatCurrentRecord
- while (thatCurrentRecord != null && thatCurrentRecord.level < selfCurrentRecord.level) {
- addToBuffer(thatCurrentRecord.rawCompactRecord)
- thatCurrentRecord = nextOrNull(thatIterator)
+ while (selfIterator.hasNext) {
+ val selfCurrentRecord = selfIterator.next()
+
+ // Advance that to no further than the level of selfCurrentRecord
+ thatCurrentRecord = if (thatCurrentRecord == null) nextOrNull(thatIterator) else thatCurrentRecord
+ while (thatCurrentRecord != null && thatCurrentRecord.level < selfCurrentRecord.level) {
+ addToBuffer(thatCurrentRecord.rawCompactRecord)
+ thatCurrentRecord = nextOrNull(thatIterator)
+ }
+
+ // Include the current record of self and optionally merge if it has the same level as thatCurrentRecord
+ if (thatCurrentRecord != null && thatCurrentRecord.level == selfCurrentRecord.level) {
+ addToBuffer(mergeCompactRecords(thatCurrentRecord.rawCompactRecord, selfCurrentRecord.rawCompactRecord))
+ thatCurrentRecord = nextOrNull(thatIterator)
+ } else {
+ addToBuffer(selfCurrentRecord.rawCompactRecord)
+ }
}
- // Include the current record of self and optionally merge if has the same level as thatCurrentRecord
- if (thatCurrentRecord != null && thatCurrentRecord.level == selfCurrentRecord.level) {
- addToBuffer(mergeCompactRecords(thatCurrentRecord.rawCompactRecord, selfCurrentRecord.rawCompactRecord))
- thatCurrentRecord = nextOrNull(thatIterator)
- } else {
- addToBuffer(selfCurrentRecord.rawCompactRecord)
+ // Include everything that might have been left from that
+ if (thatCurrentRecord != null) addToBuffer(thatCurrentRecord.rawCompactRecord)
+ while (thatIterator.hasNext) {
+ addToBuffer(thatIterator.next().rawCompactRecord)
}
- }
- // Include everything that might have been left from that
- if (thatCurrentRecord != null) addToBuffer(thatCurrentRecord.rawCompactRecord)
- while (thatIterator.hasNext) {
- addToBuffer(thatIterator.next().rawCompactRecord)
+ buffer.flip()
+ val compactRecords = Array.ofDim[Long](buffer.limit())
+ buffer.get(compactRecords)
+
+ new CompactHdrSnapshot(mergedNumberOfMeasurements, compactRecords, unitMagnitude, subBucketHalfCount, subBucketHalfCountMagnitude)
}
- buffer.flip()
- val compactRecords = Array.ofDim[Long](buffer.limit())
- buffer.get(compactRecords)
+ case other ⇒
+ sys.error(s"Cannot merge a CompactHdrSnapshot with the incompatible [${other.getClass.getName}] type.")
- new CompactHdrSnapshot(scale, mergedNumberOfMeasurements, compactRecords, unitMagnitude, subBucketHalfCount, subBucketHalfCountMagnitude)
- }
}
@inline private def mergeCompactRecords(left: Long, right: Long): Long = {
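DynamicRange replaces the old Precision/Scale pair. A sketch of creating an HdrHistogram-backed instrument with the documented bounds; the latency range is an illustrative choice:

import kamon.metric.instrument.Histogram
import kamon.metric.instrument.Histogram.DynamicRange

// Track values from 1µs up to one hour, expressed in nanoseconds, with 2 significant digits.
val oneHourInNanoseconds = 3600L * 1000L * 1000L * 1000L
val latencies = Histogram(DynamicRange(
  lowestDiscernibleValue = 1000L,
  highestTrackableValue = oneHourInNanoseconds,
  precision = 2))

latencies.record(42000L)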
diff --git a/kamon-core/src/main/scala/kamon/metric/EntityMetrics.scala b/kamon-core/src/main/scala/kamon/metric/instrument/Instrument.scala
index 3761f5a5..59b4b443 100644
--- a/kamon-core/src/main/scala/kamon/metric/EntityMetrics.scala
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/Instrument.scala
@@ -1,6 +1,6 @@
/*
* =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
@@ -14,23 +14,31 @@
* =========================================================================================
*/
-package kamon.metric
+package kamon.metric.instrument
-import java.nio.{ LongBuffer }
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
+import java.nio.LongBuffer
-trait MetricGroupCategory {
- def name: String
+import akka.actor.{ Scheduler, Cancellable }
+import akka.dispatch.MessageDispatcher
+import scala.concurrent.duration.FiniteDuration
+
+private[kamon] trait Instrument {
+ type SnapshotType <: InstrumentSnapshot
+
+ def collect(context: CollectionContext): SnapshotType
+ def cleanup: Unit
}
-trait MetricGroupIdentity {
- def name: String
- def category: MetricGroupCategory
+trait InstrumentSnapshot {
+ def merge(that: InstrumentSnapshot, context: CollectionContext): InstrumentSnapshot
}
-trait MetricIdentity {
- def name: String
+class InstrumentType private[kamon] (val id: Int) extends AnyVal
+object InstrumentTypes {
+ val Histogram = new InstrumentType(1)
+ val MinMaxCounter = new InstrumentType(2)
+ val Gauge = new InstrumentType(3)
+ val Counter = new InstrumentType(4)
}
trait CollectionContext {
@@ -43,33 +51,3 @@ object CollectionContext {
}
}
-trait MetricGroupRecorder {
- def collect(context: CollectionContext): MetricGroupSnapshot
- def cleanup: Unit
-}
-
-trait MetricSnapshot {
- type SnapshotType
-
- def merge(that: SnapshotType, context: CollectionContext): SnapshotType
-}
-
-trait MetricGroupSnapshot {
- type GroupSnapshotType
-
- def metrics: Map[MetricIdentity, MetricSnapshot]
- def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType
-}
-
-private[kamon] trait MetricRecorder {
- type SnapshotType <: MetricSnapshot
-
- def collect(context: CollectionContext): SnapshotType
- def cleanup: Unit
-}
-
-trait MetricGroupFactory {
- type GroupRecorder <: MetricGroupRecorder
- def create(config: Config, system: ActorSystem): GroupRecorder
-}
-
diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/InstrumentFactory.scala b/kamon-core/src/main/scala/kamon/metric/instrument/InstrumentFactory.scala
new file mode 100644
index 00000000..7c0201f7
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/InstrumentFactory.scala
@@ -0,0 +1,51 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric.instrument
+
+import kamon.metric.instrument.Gauge.CurrentValueCollector
+import kamon.metric.instrument.Histogram.DynamicRange
+
+import scala.concurrent.duration.FiniteDuration
+
+case class InstrumentFactory(configurations: Map[String, InstrumentCustomSettings], defaults: DefaultInstrumentSettings, scheduler: RefreshScheduler) {
+
+ private def resolveSettings(instrumentName: String, codeSettings: Option[InstrumentSettings], default: InstrumentSettings): InstrumentSettings = {
+ configurations.get(instrumentName).flatMap { customSettings ⇒
+ codeSettings.map(cs ⇒ customSettings.combine(cs)) orElse (Some(customSettings.combine(default)))
+
+ } getOrElse (codeSettings.getOrElse(default))
+ }
+
+ def createHistogram(name: String, dynamicRange: Option[DynamicRange] = None): Histogram = {
+ val settings = resolveSettings(name, dynamicRange.map(dr ⇒ InstrumentSettings(dr, None)), defaults.histogram)
+ Histogram(settings.dynamicRange)
+ }
+
+ def createMinMaxCounter(name: String, dynamicRange: Option[DynamicRange] = None, refreshInterval: Option[FiniteDuration] = None): MinMaxCounter = {
+ val settings = resolveSettings(name, dynamicRange.map(dr ⇒ InstrumentSettings(dr, refreshInterval)), defaults.minMaxCounter)
+ MinMaxCounter(settings.dynamicRange, settings.refreshInterval.get, scheduler)
+ }
+
+ def createGauge(name: String, dynamicRange: Option[DynamicRange] = None, refreshInterval: Option[FiniteDuration] = None,
+ valueCollector: CurrentValueCollector): Gauge = {
+
+ val settings = resolveSettings(name, dynamicRange.map(dr ⇒ InstrumentSettings(dr, refreshInterval)), defaults.gauge)
+ Gauge(settings.dynamicRange, settings.refreshInterval.get, scheduler, valueCollector)
+ }
+
+ def createCounter(): Counter = Counter()
+} \ No newline at end of file
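resolveSettings gives per-instrument configuration the highest precedence, then code-supplied settings, then the category defaults. A sketch, assuming the factory is obtained from the metrics extension as in the UserMetrics changes above:

import kamon.metric.instrument.Histogram.DynamicRange

val factory: InstrumentFactory = ??? // normally metricsExtension.instrumentFactory(category)

// Uses defaults.histogram unless "request-latency" is customized in configuration.
val h1 = factory.createHistogram("request-latency")

// Code-supplied range; individual fields can still be overridden from configuration.
val h2 = factory.createHistogram("request-latency", Some(DynamicRange(1L, 3600000L, 2)))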
diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/InstrumentSettings.scala b/kamon-core/src/main/scala/kamon/metric/instrument/InstrumentSettings.scala
new file mode 100644
index 00000000..29f8f46b
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/InstrumentSettings.scala
@@ -0,0 +1,65 @@
+package kamon.metric.instrument
+
+import java.util.concurrent.TimeUnit
+
+import com.typesafe.config.Config
+import kamon.metric.instrument.Histogram.DynamicRange
+import kamon.util.ConfigTools.Syntax
+
+import scala.concurrent.duration.FiniteDuration
+
+case class InstrumentCustomSettings(lowestDiscernibleValue: Option[Long], highestTrackableValue: Option[Long],
+ precision: Option[Int], refreshInterval: Option[FiniteDuration]) {
+
+ def combine(that: InstrumentSettings): InstrumentSettings =
+ InstrumentSettings(
+ DynamicRange(
+ lowestDiscernibleValue.getOrElse(that.dynamicRange.lowestDiscernibleValue),
+ highestTrackableValue.getOrElse(that.dynamicRange.highestTrackableValue),
+ precision.getOrElse(that.dynamicRange.precision)),
+ refreshInterval.orElse(that.refreshInterval))
+}
+
+object InstrumentCustomSettings {
+
+ def fromConfig(config: Config): InstrumentCustomSettings =
+ InstrumentCustomSettings(
+ if (config.hasPath("lowest-discernible-value")) Some(config.getLong("lowest-discernible-value")) else None,
+ if (config.hasPath("highest-trackable-value")) Some(config.getLong("highest-trackable-value")) else None,
+ if (config.hasPath("precision")) Some(InstrumentSettings.parsePrecision(config.getString("precision"))) else None,
+ if (config.hasPath("refresh-interval")) Some(config.getFiniteDuration("refresh-interval")) else None)
+
+}
+
+case class InstrumentSettings(dynamicRange: DynamicRange, refreshInterval: Option[FiniteDuration])
+
+object InstrumentSettings {
+
+ def readDynamicRange(config: Config): DynamicRange =
+ DynamicRange(
+ config.getLong("lowest-discernible-value"),
+ config.getLong("highest-trackable-value"),
+ parsePrecision(config.getString("precision")))
+
+ def parsePrecision(stringValue: String): Int = stringValue match {
+ case "low" ⇒ 1
+ case "normal" ⇒ 2
+ case "fine" ⇒ 3
+ case other ⇒ sys.error(s"Invalid precision configuration [$other] found, valid options are: [low|normal|fine].")
+ }
+}
+
+case class DefaultInstrumentSettings(histogram: InstrumentSettings, minMaxCounter: InstrumentSettings, gauge: InstrumentSettings)
+
+object DefaultInstrumentSettings {
+
+ def fromConfig(config: Config): DefaultInstrumentSettings = {
+ val histogramSettings = InstrumentSettings(InstrumentSettings.readDynamicRange(config.getConfig("histogram")), None)
+ val minMaxCounterSettings = InstrumentSettings(InstrumentSettings.readDynamicRange(config.getConfig("min-max-counter")),
+ Some(config.getFiniteDuration("min-max-counter.refresh-interval")))
+ val gaugeSettings = InstrumentSettings(InstrumentSettings.readDynamicRange(config.getConfig("gauge")),
+ Some(config.getFiniteDuration("gauge.refresh-interval")))
+
+ DefaultInstrumentSettings(histogramSettings, minMaxCounterSettings, gaugeSettings)
+ }
+} \ No newline at end of file
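A sketch of the custom-settings parser applied to a HOCON block; the keys follow fromConfig above and the values are made up:

import com.typesafe.config.ConfigFactory

val custom = InstrumentCustomSettings.fromConfig(ConfigFactory.parseString(
  """lowest-discernible-value = 1
    |highest-trackable-value = 3600000
    |precision = fine
  """.stripMargin))

// custom.precision == Some(3); refresh-interval was absent, so it remains None and
// combine(...) falls back to the code-supplied or default setting.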
diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/MinMaxCounter.scala b/kamon-core/src/main/scala/kamon/metric/instrument/MinMaxCounter.scala
index 61cee02a..0828c8a9 100644
--- a/kamon-core/src/main/scala/kamon/metric/instrument/MinMaxCounter.scala
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/MinMaxCounter.scala
@@ -17,16 +17,14 @@ package kamon.metric.instrument
*/
import java.lang.Math.abs
-import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicReference
-import akka.actor.{ ActorSystem, Cancellable }
-import com.typesafe.config.Config
+import akka.actor.Cancellable
import kamon.jsr166.LongMaxUpdater
-import kamon.metric.{ Scale, MetricRecorder, CollectionContext }
+import kamon.metric.instrument.Histogram.DynamicRange
import kamon.util.PaddedAtomicLong
import scala.concurrent.duration.FiniteDuration
-trait MinMaxCounter extends MetricRecorder {
+trait MinMaxCounter extends Instrument {
override type SnapshotType = Histogram.Snapshot
def increment(): Unit
@@ -38,29 +36,20 @@ trait MinMaxCounter extends MetricRecorder {
object MinMaxCounter {
- def apply(highestTrackableValue: Long, precision: Histogram.Precision, scale: Scale, refreshInterval: FiniteDuration,
- system: ActorSystem): MinMaxCounter = {
-
- val underlyingHistogram = Histogram(highestTrackableValue, precision, scale)
+ def apply(dynamicRange: DynamicRange, refreshInterval: FiniteDuration, scheduler: RefreshScheduler): MinMaxCounter = {
+ val underlyingHistogram = Histogram(dynamicRange)
val minMaxCounter = new PaddedMinMaxCounter(underlyingHistogram)
-
- val refreshValuesSchedule = system.scheduler.schedule(refreshInterval, refreshInterval) {
+ val refreshValuesSchedule = scheduler.schedule(refreshInterval, () ⇒ {
minMaxCounter.refreshValues()
- }(system.dispatcher) // TODO: Move this to Kamon dispatchers
+ })
minMaxCounter.refreshValuesSchedule.set(refreshValuesSchedule)
minMaxCounter
}
- def fromConfig(config: Config, system: ActorSystem): MinMaxCounter = {
- import scala.concurrent.duration._
+ def create(dynamicRange: DynamicRange, refreshInterval: FiniteDuration, scheduler: RefreshScheduler): MinMaxCounter =
+ apply(dynamicRange, refreshInterval, scheduler)
- val highest = config.getLong("highest-trackable-value")
- val significantDigits = config.getInt("significant-value-digits")
- val refreshInterval = config.getMilliseconds("refresh-interval").toInt
-
- apply(highest, Histogram.Precision(significantDigits), Scale.Unit, refreshInterval.millis, system)
- }
}
class PaddedMinMaxCounter(underlyingHistogram: Histogram) extends MinMaxCounter {
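Creation now mirrors Gauge: a DynamicRange plus a RefreshScheduler instead of an ActorSystem. A sketch with assumed scheduler wiring:

import kamon.metric.instrument.Histogram.DynamicRange
import scala.concurrent.duration._

val scheduler: RefreshScheduler = ??? // e.g. DefaultRefreshScheduler(system.scheduler, system.dispatcher)
val activeRequests = MinMaxCounter(DynamicRange(1L, 999999999L, 2), 100.millis, scheduler)

activeRequests.increment()
activeRequests.decrement()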
diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/RefreshScheduler.scala b/kamon-core/src/main/scala/kamon/metric/instrument/RefreshScheduler.scala
new file mode 100644
index 00000000..adb08713
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/RefreshScheduler.scala
@@ -0,0 +1,99 @@
+package kamon.metric.instrument
+
+import akka.actor.{ Scheduler, Cancellable }
+import org.HdrHistogram.WriterReaderPhaser
+
+import scala.collection.concurrent.TrieMap
+import scala.concurrent.ExecutionContext
+import scala.concurrent.duration.FiniteDuration
+
+trait RefreshScheduler {
+ def schedule(interval: FiniteDuration, refresh: () ⇒ Unit): Cancellable
+}
+
+/**
+ * Default implementation of RefreshScheduler that simply uses an [[akka.actor.Scheduler]] to schedule tasks to be run
+ * in the provided ExecutionContext.
+ */
+class DefaultRefreshScheduler(scheduler: Scheduler, dispatcher: ExecutionContext) extends RefreshScheduler {
+ def schedule(interval: FiniteDuration, refresh: () ⇒ Unit): Cancellable =
+ scheduler.schedule(interval, interval)(refresh.apply())(dispatcher)
+}
+
+object DefaultRefreshScheduler {
+ def apply(scheduler: Scheduler, dispatcher: ExecutionContext): RefreshScheduler =
+ new DefaultRefreshScheduler(scheduler, dispatcher)
+
+ def create(scheduler: Scheduler, dispatcher: ExecutionContext): RefreshScheduler =
+ apply(scheduler, dispatcher)
+}
+
+/**
+ * RefreshScheduler implementation that accumulates all the scheduled actions until it is pointed to another refresh
+ * scheduler. Once it is pointed, all subsequent calls to `schedule` will immediately be scheduled in the pointed
+ * scheduler.
+ */
+class LazyRefreshScheduler extends RefreshScheduler {
+ private val _schedulerPhaser = new WriterReaderPhaser
+ private val _backlog = new TrieMap[(FiniteDuration, () ⇒ Unit), RepointableCancellable]()
+ @volatile private var _target: Option[RefreshScheduler] = None
+
+ def schedule(interval: FiniteDuration, refresh: () ⇒ Unit): Cancellable = {
+ val criticalEnter = _schedulerPhaser.writerCriticalSectionEnter()
+ try {
+ _target.map { scheduler ⇒
+ scheduler.schedule(interval, refresh)
+
+ } getOrElse {
+ val entry = (interval, refresh)
+ val cancellable = new RepointableCancellable(entry)
+
+ _backlog.put(entry, cancellable)
+ cancellable
+ }
+
+ } finally {
+ _schedulerPhaser.writerCriticalSectionExit(criticalEnter)
+ }
+ }
+
+ def point(target: RefreshScheduler): Unit = try {
+ _schedulerPhaser.readerLock()
+
+ if (_target.isEmpty) {
+ _target = Some(target)
+ _schedulerPhaser.flipPhase(10000L)
+ _backlog.dropWhile {
+ case ((interval, refresh), repointableCancellable) ⇒
+ repointableCancellable.point(target.schedule(interval, refresh))
+ true
+ }
+ } else sys.error("A LazyRefreshScheduler cannot be pointed more than once.")
+ } finally { _schedulerPhaser.readerUnlock() }
+
+ class RepointableCancellable(entry: (FiniteDuration, () ⇒ Unit)) extends Cancellable {
+ private var _isCancelled = false
+ private var _cancellable: Option[Cancellable] = None
+
+ def isCancelled: Boolean = synchronized {
+ _cancellable.map(_.isCancelled).getOrElse(_isCancelled)
+ }
+
+ def cancel(): Boolean = synchronized {
+ _isCancelled = true
+ _cancellable.map(_.cancel()).getOrElse(_backlog.remove(entry).nonEmpty)
+ }
+
+ def point(cancellable: Cancellable): Unit = synchronized {
+ if (_cancellable.isEmpty) {
+ _cancellable = Some(cancellable)
+
+ if (_isCancelled)
+ cancellable.cancel()
+
+ } else sys.error("A RepointableCancellable cannot be pointed more than once.")
+
+ }
+ }
+}
+
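The lazy scheduler lets instruments register refresh tasks before any real scheduler exists; point(...) then drains the backlog exactly once. A sketch:

import scala.concurrent.duration._

val lazyScheduler = new LazyRefreshScheduler

// Buffered in the backlog until the scheduler is pointed.
val task = lazyScheduler.schedule(1.second, () ⇒ println("refresh"))

// Later, during module startup (actor-system wiring is an assumption):
// lazyScheduler.point(DefaultRefreshScheduler(system.scheduler, system.dispatcher))

task.cancel() // valid whether the task is still buffered or already repointed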
diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/UnitOfMeasurement.scala b/kamon-core/src/main/scala/kamon/metric/instrument/UnitOfMeasurement.scala
new file mode 100644
index 00000000..f2a061d1
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/UnitOfMeasurement.scala
@@ -0,0 +1,71 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric.instrument
+
+trait UnitOfMeasurement {
+ def name: String
+ def label: String
+ def factor: Double
+}
+
+object UnitOfMeasurement {
+ case object Unknown extends UnitOfMeasurement {
+ val name = "unknown"
+ val label = "unknown"
+ val factor = 1D
+ }
+
+ def isUnknown(uom: UnitOfMeasurement): Boolean =
+ uom == Unknown
+
+ def isTime(uom: UnitOfMeasurement): Boolean =
+ uom.isInstanceOf[Time]
+
+}
+
+case class Time(factor: Double, label: String) extends UnitOfMeasurement {
+ val name = "time"
+
+ /**
+ * Scale a value expressed in this time unit to a different time unit.
+ *
+ * @param toUnit Time unit of the expected result.
+ * @param value Value to scale.
+ * @return Equivalent of value on the target time unit.
+ */
+ def scale(toUnit: Time)(value: Long): Double =
+ (value * factor) / toUnit.factor
+}
+
+object Time {
+ val Nanoseconds = Time(1E-9, "n")
+ val Microseconds = Time(1E-6, "µs")
+ val Milliseconds = Time(1E-3, "ms")
+ val Seconds = Time(1, "s")
+}
+
+case class Memory(factor: Double, label: String) extends UnitOfMeasurement {
+ val name = "bytes"
+}
+
+object Memory {
+ val Bytes = Memory(1, "b")
+ val KiloBytes = Memory(1024, "Kb")
+ val MegaBytes = Memory(1024 * 1024, "Mb")
+ val GigaBytes = Memory(1024 * 1024 * 1024, "Gb")
+}
+
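Time.scale converts recorded values between units through the shared base factor; a quick worked example:

val recordedNanos = 42000000L
val inMillis = Time.Nanoseconds.scale(Time.Milliseconds)(recordedNanos)
// (42000000 * 1E-9) / 1E-3 == 42.0 milliseconds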
diff --git a/kamon-core/src/main/scala/kamon/standalone/KamonStandalone.scala b/kamon-core/src/main/scala/kamon/standalone/KamonStandalone.scala
deleted file mode 100644
index 490bc127..00000000
--- a/kamon-core/src/main/scala/kamon/standalone/KamonStandalone.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-package kamon.standalone
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.Kamon
-import kamon.metric.UserMetrics
-import kamon.metric.instrument.{ Gauge, MinMaxCounter, Counter, Histogram }
-
-import scala.concurrent.duration.FiniteDuration
-
-trait KamonStandalone {
- private[kamon] def system: ActorSystem
-
- def registerHistogram(name: String, precision: Histogram.Precision, highestTrackableValue: Long): Histogram =
- Kamon(UserMetrics)(system).registerHistogram(name, precision, highestTrackableValue)
-
- def registerHistogram(name: String): Histogram =
- Kamon(UserMetrics)(system).registerHistogram(name)
-
- def registerCounter(name: String): Counter =
- Kamon(UserMetrics)(system).registerCounter(name)
-
- def registerMinMaxCounter(name: String, precision: Histogram.Precision, highestTrackableValue: Long,
- refreshInterval: FiniteDuration): MinMaxCounter =
- Kamon(UserMetrics)(system).registerMinMaxCounter(name, precision, highestTrackableValue, refreshInterval)
-
- def registerMinMaxCounter(name: String): MinMaxCounter =
- Kamon(UserMetrics)(system).registerMinMaxCounter(name)
-
- def registerGauge(name: String)(currentValueCollector: Gauge.CurrentValueCollector): Gauge =
- Kamon(UserMetrics)(system).registerGauge(name)(currentValueCollector)
-
- def registerGauge(name: String, precision: Histogram.Precision, highestTrackableValue: Long,
- refreshInterval: FiniteDuration)(currentValueCollector: Gauge.CurrentValueCollector): Gauge =
- Kamon(UserMetrics)(system).registerGauge(name, precision, highestTrackableValue, refreshInterval)(currentValueCollector)
-
- def removeHistogram(name: String): Unit =
- Kamon(UserMetrics)(system).removeHistogram(name)
-
- def removeCounter(name: String): Unit =
- Kamon(UserMetrics)(system).removeCounter(name)
-
- def removeMinMaxCounter(name: String): Unit =
- Kamon(UserMetrics)(system).removeMinMaxCounter(name)
-
- def removeGauge(name: String): Unit =
- Kamon(UserMetrics)(system).removeGauge(name)
-}
-
-object KamonStandalone {
-
- def buildFromConfig(config: Config): KamonStandalone = buildFromConfig(config, "kamon-standalone")
-
- def buildFromConfig(config: Config, actorSystemName: String): KamonStandalone = new KamonStandalone {
- val system: ActorSystem = ActorSystem(actorSystemName, config)
- }
-}
-
-object EmbeddedKamonStandalone extends KamonStandalone {
- private[kamon] lazy val system = ActorSystem("kamon-standalone")
-} \ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/metric/Scale.scala b/kamon-core/src/main/scala/kamon/supervisor/AspectJPresent.scala
index 2f27c1a3..0df9539f 100644
--- a/kamon-core/src/main/scala/kamon/metric/Scale.scala
+++ b/kamon-core/src/main/scala/kamon/supervisor/AspectJPresent.scala
@@ -1,6 +1,6 @@
/*
* =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
@@ -14,18 +14,18 @@
* =========================================================================================
*/
-package kamon.metric
+package kamon.supervisor
-class Scale(val numericValue: Double) extends AnyVal
+import org.aspectj.lang.ProceedingJoinPoint
+import org.aspectj.lang.annotation.{ Around, Aspect, Pointcut }
-object Scale {
- val Nano = new Scale(1E-9)
- val Micro = new Scale(1E-6)
- val Milli = new Scale(1E-3)
- val Unit = new Scale(1)
- val Kilo = new Scale(1E3)
- val Mega = new Scale(1E6)
- val Giga = new Scale(1E9)
+@Aspect
+class AspectJPresent {
+
+ @Pointcut("execution(* kamon.supervisor.KamonSupervisor.isAspectJPresent())")
+ def isAspectJPresentAtModuleSupervisor(): Unit = {}
+
+ @Around("isAspectJPresentAtModuleSupervisor()")
+ def aroundIsAspectJPresentAtModuleSupervisor(pjp: ProceedingJoinPoint): Boolean = true
- def convert(fromUnit: Scale, toUnit: Scale, value: Long): Double = (value * fromUnit.numericValue) / toUnit.numericValue
}
diff --git a/kamon-core/src/main/scala/kamon/supervisor/ModuleSupervisorExtension.scala b/kamon-core/src/main/scala/kamon/supervisor/ModuleSupervisorExtension.scala
new file mode 100644
index 00000000..ddce63fb
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/supervisor/ModuleSupervisorExtension.scala
@@ -0,0 +1,125 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.supervisor
+
+import akka.actor
+import akka.actor._
+import kamon.Kamon
+import kamon.supervisor.KamonSupervisor.CreateModule
+
+import scala.concurrent.{ Promise, Future }
+import scala.util.Success
+
+object ModuleSupervisor extends ExtensionId[ModuleSupervisorExtension] with ExtensionIdProvider {
+ def lookup(): ExtensionId[_ <: actor.Extension] = ModuleSupervisor
+ def createExtension(system: ExtendedActorSystem): ModuleSupervisorExtension = new ModuleSupervisorExtensionImpl(system)
+}
+
+trait ModuleSupervisorExtension extends actor.Extension {
+ def createModule(name: String, props: Props): Future[ActorRef]
+}
+
+class ModuleSupervisorExtensionImpl(system: ExtendedActorSystem) extends ModuleSupervisorExtension {
+ import system.dispatcher
+
+ private val _settings = ModuleSupervisorSettings(system)
+ private val _supervisor = system.actorOf(KamonSupervisor.props(_settings, system.dynamicAccess), "kamon")
+
+ def createModule(name: String, props: Props): Future[ActorRef] = Future {} flatMap { _: Unit ⇒
+ val modulePromise = Promise[ActorRef]()
+ _supervisor ! CreateModule(name, props, modulePromise)
+ modulePromise.future
+ }
+}
+
+class KamonSupervisor(settings: ModuleSupervisorSettings, dynamicAccess: DynamicAccess) extends Actor with ActorLogging {
+
+ init()
+
+ def receive = {
+ case CreateModule(name, props, childPromise) ⇒ createChildModule(name, props, childPromise)
+ }
+
+ def createChildModule(name: String, props: Props, childPromise: Promise[ActorRef]): Unit =
+ context.child(name).map { alreadyAvailableModule ⇒
+ log.warning("Received a request to create module [{}] but the module is already available, returning the existent instance.")
+ childPromise.complete(Success(alreadyAvailableModule))
+
+ } getOrElse (childPromise.complete(Success(context.actorOf(props, name))))
+
+ def init(): Unit = {
+ if (settings.modulesRequiringAspectJ.nonEmpty && !isAspectJPresent && settings.showAspectJMissingWarning)
+ logAspectJWeaverMissing(settings.modulesRequiringAspectJ)
+
+ // Force initialization of all modules marked with auto-start.
+ settings.availableModules.filter(_.autoStart).foreach { module ⇒
+ if (module.extensionClass == "none")
+ log.debug("Ignoring auto start of the [{}] module with no extension class.")
+ else
+ dynamicAccess.getObjectFor[ExtensionId[Kamon.Extension]](module.extensionClass).map { moduleID ⇒
+ moduleID.get(context.system)
+ log.debug("Auto starting the [{}] module.", module.name)
+
+ } recover {
+ case th: Throwable ⇒ log.error(th, "Failed to auto start the [{}] module.", module.name)
+ }
+
+ }
+ }
+
+ // When AspectJ is present the kamon.supervisor.AspectJPresent aspect will make this return true.
+ def isAspectJPresent: Boolean = false
+
+ def logAspectJWeaverMissing(modulesRequiringAspectJ: List[AvailableModuleInfo]): Unit = {
+ val moduleNames = modulesRequiringAspectJ.map(_.name).mkString(", ")
+ val weaverMissingMessage =
+ """
+ |
+ | ___ _ ___ _ _ ___ ___ _ _
+ | / _ \ | | |_ | | | | | | \/ |(_) (_)
+ |/ /_\ \ ___ _ __ ___ ___ | |_ | | | | | | ___ __ _ __ __ ___ _ __ | . . | _ ___ ___ _ _ __ __ _
+ || _ |/ __|| '_ \ / _ \ / __|| __| | | | |/\| | / _ \ / _` |\ \ / // _ \| '__| | |\/| || |/ __|/ __|| || '_ \ / _` |
+ || | | |\__ \| |_) || __/| (__ | |_ /\__/ / \ /\ /| __/| (_| | \ V /| __/| | | | | || |\__ \\__ \| || | | || (_| |
+ |\_| |_/|___/| .__/ \___| \___| \__|\____/ \/ \/ \___| \__,_| \_/ \___||_| \_| |_/|_||___/|___/|_||_| |_| \__, |
+ | | | __/ |
+ | |_| |___/
+ |
+ | It seems like your application was not started with the -javaagent:/path-to-aspectj-weaver.jar option but Kamon detected
+ | the following modules which require AspectJ to work properly:
+ |
+ """.stripMargin + moduleNames +
+ """
+ |
+ | If you need help setting up the AspectJ weaver, go to http://kamon.io/introduction/get-started/ for more info. On the
+ | other hand, if you are sure that you do not need or want to use the weaver, you can disable this error message
+ | by changing the kamon.show-aspectj-missing-warning setting in your configuration file.
+ |
+ """.stripMargin
+
+ log.error(weaverMissingMessage)
+ }
+
+}
+
+object KamonSupervisor {
+ case class CreateModule(name: String, props: Props, childPromise: Promise[ActorRef])
+
+ def props(settings: ModuleSupervisorSettings, dynamicAccess: DynamicAccess): Props =
+ Props(new KamonSupervisor(settings, dynamicAccess))
+
+}
+
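For context, a minimal sketch of how a module would be created through this extension; the module name and actor class below are illustrative assumptions, and Kamon's reference configuration is assumed to be on the classpath:

import akka.actor.{ Actor, ActorRef, ActorSystem, Props }
import scala.concurrent.Future

class MyModuleActor extends Actor { // illustrative module actor
  def receive = { case msg ⇒ println(s"module received: $msg") }
}

val system = ActorSystem("example")
import system.dispatcher

// Module creation is asynchronous; the Future completes once the supervisor
// has created (or found) the child actor named "my-module".
val module: Future[ActorRef] = ModuleSupervisor(system).createModule("my-module", Props[MyModuleActor])
module.foreach(_ ! "hello")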
diff --git a/kamon-core/src/main/scala/kamon/supervisor/ModuleSupervisorSettings.scala b/kamon-core/src/main/scala/kamon/supervisor/ModuleSupervisorSettings.scala
new file mode 100644
index 00000000..c04157aa
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/supervisor/ModuleSupervisorSettings.scala
@@ -0,0 +1,49 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.supervisor
+
+import akka.actor.ActorSystem
+
+case class AvailableModuleInfo(name: String, extensionClass: String, requiresAspectJ: Boolean, autoStart: Boolean)
+case class ModuleSupervisorSettings(showAspectJMissingWarning: Boolean, availableModules: List[AvailableModuleInfo]) {
+ val modulesRequiringAspectJ = availableModules.filter(_.requiresAspectJ)
+}
+
+object ModuleSupervisorSettings {
+
+ def apply(system: ActorSystem): ModuleSupervisorSettings = {
+ import kamon.util.ConfigTools.Syntax
+
+ val config = system.settings.config.getConfig("kamon.modules")
+ val showAspectJMissingWarning = system.settings.config.getBoolean("kamon.show-aspectj-missing-warning")
+
+ val modules = config.firstLevelKeys
+ val availableModules = modules.map { moduleName ⇒
+ val moduleConfig = config.getConfig(moduleName)
+
+ AvailableModuleInfo(
+ moduleName,
+ moduleConfig.getString("extension-id"),
+ moduleConfig.getBoolean("requires-aspectj"),
+ moduleConfig.getBoolean("auto-start"))
+
+ } toList
+
+ ModuleSupervisorSettings(showAspectJMissingWarning, availableModules)
+ }
+
+}
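The settings above expect a kamon.modules block shaped like the following sketch; the module name and extension class are illustrative:

import akka.actor.ActorSystem
import com.typesafe.config.ConfigFactory

val config = ConfigFactory.parseString(
  """
    |kamon.show-aspectj-missing-warning = on
    |kamon.modules {
    |  kamon-example {
    |    extension-id = "com.example.ExampleExtension"
    |    requires-aspectj = no
    |    auto-start = no
    |  }
    |}
  """.stripMargin).withFallback(ConfigFactory.load())

val system = ActorSystem("settings-example", config)
// availableModules will include AvailableModuleInfo("kamon-example", "com.example.ExampleExtension", false, false),
// plus any modules contributed by reference configuration on the classpath.
val settings = ModuleSupervisorSettings(system)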
diff --git a/kamon-core/src/main/scala/kamon/trace/Incubator.scala b/kamon-core/src/main/scala/kamon/trace/Incubator.scala
new file mode 100644
index 00000000..19ea4f39
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/trace/Incubator.scala
@@ -0,0 +1,97 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.trace
+
+import java.util.concurrent.TimeUnit
+
+import akka.actor.{ ActorLogging, Props, Actor, ActorRef }
+import kamon.trace.Incubator.{ CheckForCompletedTraces, IncubatingTrace }
+import kamon.util.{ NanoInterval, RelativeNanoTimestamp }
+import scala.annotation.tailrec
+import scala.collection.immutable.Queue
+import scala.concurrent.duration._
+
+class Incubator(subscriptions: ActorRef) extends Actor with ActorLogging {
+ import kamon.util.ConfigTools.Syntax
+ import context.dispatcher
+ val config = context.system.settings.config.getConfig("kamon.trace.incubator")
+
+ val minIncubationTime = new NanoInterval(config.getFiniteDuration("min-incubation-time").toNanos)
+ val maxIncubationTime = new NanoInterval(config.getFiniteDuration("max-incubation-time").toNanos)
+ val checkInterval = config.getFiniteDuration("check-interval")
+
+ val checkSchedule = context.system.scheduler.schedule(checkInterval, checkInterval, self, CheckForCompletedTraces)
+ var waitingForMinimumIncubation = Queue.empty[IncubatingTrace]
+ var waitingForIncubationFinish = List.empty[IncubatingTrace]
+
+ def receive = {
+ case tc: TracingContext ⇒ incubate(tc)
+ case CheckForCompletedTraces ⇒
+ checkWaitingForMinimumIncubation()
+ checkWaitingForIncubationFinish()
+ }
+
+ def incubate(tc: TracingContext): Unit =
+ waitingForMinimumIncubation = waitingForMinimumIncubation.enqueue(IncubatingTrace(tc, RelativeNanoTimestamp.now))
+
+ @tailrec private def checkWaitingForMinimumIncubation(): Unit = {
+ if (waitingForMinimumIncubation.nonEmpty) {
+ val it = waitingForMinimumIncubation.head
+ if (NanoInterval.since(it.incubationStart) >= minIncubationTime) {
+ waitingForMinimumIncubation = waitingForMinimumIncubation.tail
+
+ if (it.tc.shouldIncubate)
+ waitingForIncubationFinish = it :: waitingForIncubationFinish
+ else
+ dispatchTraceInfo(it.tc)
+
+ checkWaitingForMinimumIncubation()
+ }
+ }
+ }
+
+ private def checkWaitingForIncubationFinish(): Unit = {
+ waitingForIncubationFinish = waitingForIncubationFinish.filter {
+ case IncubatingTrace(context, incubationStart) ⇒
+ if (!context.shouldIncubate) {
+ dispatchTraceInfo(context)
+ false
+ } else {
+ if (NanoInterval.since(incubationStart) >= maxIncubationTime) {
+ log.warning("Trace [{}] with token [{}] has reached the maximum incubation time, will be reported as is.", context.name, context.token)
+ dispatchTraceInfo(context);
+ false
+ } else true
+ }
+ }
+ }
+
+ def dispatchTraceInfo(tc: TracingContext): Unit = subscriptions ! tc.generateTraceInfo
+
+ override def postStop(): Unit = {
+ super.postStop()
+ checkSchedule.cancel()
+ }
+}
+
+object Incubator {
+
+ def props(subscriptions: ActorRef): Props = Props(new Incubator(subscriptions))
+
+ case object CheckForCompletedTraces
+ case class IncubatingTrace(tc: TracingContext, incubationStart: RelativeNanoTimestamp)
+}
diff --git a/kamon-core/src/main/scala/kamon/trace/MetricsOnlyContext.scala b/kamon-core/src/main/scala/kamon/trace/MetricsOnlyContext.scala
new file mode 100644
index 00000000..5f7fdff5
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/trace/MetricsOnlyContext.scala
@@ -0,0 +1,120 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.trace
+
+import java.util.concurrent.ConcurrentLinkedQueue
+
+import akka.event.LoggingAdapter
+import kamon.metric.{ MetricsExtension, TraceMetrics }
+import kamon.util.{ NanoInterval, RelativeNanoTimestamp }
+
+import scala.annotation.tailrec
+
+private[kamon] class MetricsOnlyContext(traceName: String, val token: String, izOpen: Boolean, val levelOfDetail: LevelOfDetail,
+ val startTimestamp: RelativeNanoTimestamp, log: LoggingAdapter, metricsExtension: MetricsExtension)
+ extends TraceContext {
+
+ @volatile private var _name = traceName
+ @volatile private var _isOpen = izOpen
+ @volatile protected var _elapsedTime = NanoInterval.default
+
+ private val _finishedSegments = new ConcurrentLinkedQueue[SegmentLatencyData]()
+ private val _traceLocalStorage = new TraceLocalStorage
+
+ def rename(newName: String): Unit =
+ if (isOpen)
+ _name = newName
+ else if (log.isWarningEnabled)
+ log.warning("Can't rename trace from [{}] to [{}] because the trace is already closed.", name, newName)
+
+ def name: String = _name
+ def isEmpty: Boolean = false
+ def isOpen: Boolean = _isOpen
+ def addMetadata(key: String, value: String): Unit = {}
+
+ def finish(): Unit = {
+ _isOpen = false
+ val traceElapsedTime = NanoInterval.since(startTimestamp)
+ _elapsedTime = traceElapsedTime
+
+ metricsExtension.register(TraceMetrics, name).map { registration ⇒
+ registration.recorder.ElapsedTime.record(traceElapsedTime.nanos)
+ drainFinishedSegments(registration.recorder)
+ }
+ }
+
+ def startSegment(segmentName: String, category: String, library: String): Segment =
+ new MetricsOnlySegment(segmentName, category, library)
+
+ @tailrec private def drainFinishedSegments(recorder: TraceMetrics): Unit = {
+ val segment = _finishedSegments.poll()
+ if (segment != null) {
+ recorder.segment(segment.name, segment.category, segment.library).record(segment.duration.nanos)
+ drainFinishedSegments(recorder)
+ }
+ }
+
+ protected def finishSegment(segmentName: String, category: String, library: String, duration: NanoInterval): Unit = {
+ _finishedSegments.add(SegmentLatencyData(segmentName, category, library, duration))
+
+ if (isClosed) {
+ metricsExtension.register(TraceMetrics, name).map { registration ⇒
+ drainFinishedSegments(registration.recorder)
+ }
+ }
+ }
+
+ // Should only be used by the TraceLocal utilities.
+ def traceLocalStorage: TraceLocalStorage = _traceLocalStorage
+
+ // Handle with care and make sure that the trace is closed before calling this method, otherwise NanoInterval.default
+ // will be returned.
+ def elapsedTime: NanoInterval = _elapsedTime
+
+ class MetricsOnlySegment(segmentName: String, val category: String, val library: String) extends Segment {
+ private val _startTimestamp = RelativeNanoTimestamp.now
+ @volatile private var _segmentName = segmentName
+ @volatile private var _elapsedTime = NanoInterval.default
+ @volatile private var _isOpen = true
+
+ def name: String = _segmentName
+ def isEmpty: Boolean = false
+ def addMetadata(key: String, value: String): Unit = {}
+ def isOpen: Boolean = _isOpen
+
+ def rename(newName: String): Unit =
+ if (isOpen)
+ _segmentName = newName
+ else if (log.isWarningEnabled)
+ log.warning("Can't rename segment from [{}] to [{}] because the segment is already closed.", name, newName)
+
+ def finish: Unit = {
+ _isOpen = false
+ val segmentElapsedTime = NanoInterval.since(_startTimestamp)
+ _elapsedTime = segmentElapsedTime
+
+ finishSegment(name, category, library, segmentElapsedTime)
+ }
+
+ // Handle with care and make sure that the segment is closed before calling this method, otherwise
+ // NanoInterval.default will be returned.
+ def elapsedTime: NanoInterval = _elapsedTime
+ def startTimestamp: RelativeNanoTimestamp = _startTimestamp
+ }
+}
+
+case class SegmentLatencyData(name: String, category: String, library: String, duration: NanoInterval)
diff --git a/kamon-core/src/main/scala/kamon/trace/Sampler.scala b/kamon-core/src/main/scala/kamon/trace/Sampler.scala
new file mode 100644
index 00000000..827840d7
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/trace/Sampler.scala
@@ -0,0 +1,73 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.trace
+
+import kamon.util.{ NanoInterval, Sequencer }
+import scala.concurrent.duration.FiniteDuration
+import scala.concurrent.forkjoin.ThreadLocalRandom
+
+trait Sampler {
+ def shouldTrace: Boolean
+ def shouldReport(traceElapsedTime: NanoInterval): Boolean
+}
+
+object NoSampling extends Sampler {
+ def shouldTrace: Boolean = false
+ def shouldReport(traceElapsedTime: NanoInterval): Boolean = false
+}
+
+object SampleAll extends Sampler {
+ def shouldTrace: Boolean = true
+ def shouldReport(traceElapsedTime: NanoInterval): Boolean = true
+}
+
+class RandomSampler(chance: Int) extends Sampler {
+ require(chance > 0, "kamon.trace.random-sampler.chance cannot be <= 0")
+ require(chance <= 100, "kamon.trace.random-sampler.chance cannot be > 100")
+
+ def shouldTrace: Boolean = ThreadLocalRandom.current().nextInt(100) < chance // nextInt(100) yields 0-99, so < keeps chance an exact percentage
+ def shouldReport(traceElapsedTime: NanoInterval): Boolean = true
+}
+
+class OrderedSampler(interval: Int) extends Sampler {
+ import OrderedSampler._
+
+ require(interval > 0, "kamon.trace.ordered-sampler.interval cannot be <= 0")
+ assume(interval isPowerOfTwo, "kamon.trace.ordered-sampler.interval must be power of two")
+
+ private val sequencer = Sequencer()
+
+ def shouldTrace: Boolean = (sequencer.next() fastMod interval) == 0
+ def shouldReport(traceElapsedTime: NanoInterval): Boolean = true
+}
+
+object OrderedSampler {
+ implicit class EnhancedInt(i: Int) {
+ def isPowerOfTwo = (i & (i - 1)) == 0
+ }
+
+ implicit class EnhancedLong(dividend: Long) {
+ def fastMod(divisor: Int) = dividend & (divisor - 1)
+ }
+}
+
+class ThresholdSampler(threshold: FiniteDuration) extends Sampler {
+
+ def shouldTrace: Boolean = true
+ def shouldReport(traceElapsedTime: NanoInterval): Boolean = traceElapsedTime.nanos >= threshold.toNanos
+}
+
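A small sketch of how these samplers behave; the values are illustrative:

val random = new RandomSampler(10) // traces roughly 10% of the requests
val ordered = new OrderedSampler(8) // traces every 8th request; 8 must be a power of two

// fastMod is plain bit masking, valid only because the divisor is a power of two:
// dividend & (divisor - 1) == dividend % divisor
assert((13L & (8 - 1)) == 13L % 8)

// The first 16 calls produce exactly two positive decisions, at the 8th and 16th call.
val positives = (1 to 16).count(_ ⇒ ordered.shouldTrace)
assert(positives == 2)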
diff --git a/kamon-core/src/main/scala/kamon/trace/TraceContext.scala b/kamon-core/src/main/scala/kamon/trace/TraceContext.scala
index 5b74e6b2..48e56153 100644
--- a/kamon-core/src/main/scala/kamon/trace/TraceContext.scala
+++ b/kamon-core/src/main/scala/kamon/trace/TraceContext.scala
@@ -17,146 +17,113 @@
package kamon.trace
import java.io.ObjectStreamException
-
-import akka.actor.ActorSystem
-import kamon.Kamon
-import kamon.metric._
-import java.util.concurrent.ConcurrentLinkedQueue
import kamon.trace.TraceContextAware.DefaultTraceContextAware
-import kamon.metric.TraceMetrics.TraceMetricRecorder
-
-import scala.annotation.tailrec
+import kamon.util.RelativeNanoTimestamp
-sealed trait TraceContext {
+trait TraceContext {
def name: String
def token: String
- def rename(name: String): Unit
- def finish(): Unit
- def origin: TraceContextOrigin
- def isOpen: Boolean
- def isClosed: Boolean = !isOpen
def isEmpty: Boolean
def nonEmpty: Boolean = !isEmpty
+ def isOpen: Boolean
+ def isClosed: Boolean = !isOpen
+
+ def finish(): Unit
+ def rename(newName: String): Unit
+
def startSegment(segmentName: String, category: String, library: String): Segment
- def nanoTimestamp: Long
+ def addMetadata(key: String, value: String): Unit
+
+ def startTimestamp: RelativeNanoTimestamp
}
-sealed trait Segment {
+object TraceContext {
+ private[kamon] val _traceContextStorage = new ThreadLocal[TraceContext] {
+ override def initialValue(): TraceContext = EmptyTraceContext
+ }
+
+ def currentContext: TraceContext =
+ _traceContextStorage.get()
+
+ def setCurrentContext(context: TraceContext): Unit =
+ _traceContextStorage.set(context)
+
+ def clearCurrentContext: Unit =
+ _traceContextStorage.remove()
+
+ def withContext[T](context: TraceContext)(code: ⇒ T): T = {
+ val oldContext = _traceContextStorage.get()
+ _traceContextStorage.set(context)
+
+ try code finally _traceContextStorage.set(oldContext)
+ }
+
+ def map[T](f: TraceContext ⇒ T): Option[T] = {
+ val current = currentContext
+ if (current.nonEmpty)
+ Some(f(current))
+ else None
+ }
+
+}
+
+trait Segment {
def name: String
- def rename(newName: String): Unit
def category: String
def library: String
- def finish(): Unit
def isEmpty: Boolean
+ def nonEmpty: Boolean = !isEmpty
+ def isOpen: Boolean
+ def isClosed: Boolean = !isOpen
+
+ def finish(): Unit
+ def rename(newName: String): Unit
+ def addMetadata(key: String, value: String): Unit
}
case object EmptyTraceContext extends TraceContext {
def name: String = "empty-trace"
def token: String = ""
- def rename(name: String): Unit = {}
- def finish(): Unit = {}
- def origin: TraceContextOrigin = TraceContextOrigin.Local
- def isOpen: Boolean = false
def isEmpty: Boolean = true
+ def isOpen: Boolean = false
+
+ def finish(): Unit = {}
+ def rename(name: String): Unit = {}
def startSegment(segmentName: String, category: String, library: String): Segment = EmptySegment
- def nanoTimestamp: Long = 0L
+ def addMetadata(key: String, value: String): Unit = {}
+ def startTimestamp = new RelativeNanoTimestamp(0L)
case object EmptySegment extends Segment {
val name: String = "empty-segment"
val category: String = "empty-category"
val library: String = "empty-library"
def isEmpty: Boolean = true
- def rename(newName: String): Unit = {}
- def finish: Unit = {}
- }
-}
-
-class DefaultTraceContext(traceName: String, val token: String, izOpen: Boolean, val levelOfDetail: LevelOfDetail,
- val origin: TraceContextOrigin, nanoTimeztamp: Long, val system: ActorSystem) extends TraceContext {
-
- val isEmpty: Boolean = false
- @volatile private var _name = traceName
- @volatile private var _isOpen = izOpen
-
- private val _nanoTimestamp = nanoTimeztamp
- private val finishedSegments = new ConcurrentLinkedQueue[SegmentData]()
- private val metricsExtension = Kamon(Metrics)(system)
- private[kamon] val traceLocalStorage: TraceLocalStorage = new TraceLocalStorage
-
- def name: String = _name
- def rename(newName: String): Unit =
- if (isOpen) _name = newName // TODO: log a warning about renaming a closed trace.
-
- def isOpen: Boolean = _isOpen
- def nanoTimestamp: Long = _nanoTimestamp
-
- def finish(): Unit = {
- _isOpen = false
- val elapsedNanoTime = System.nanoTime() - _nanoTimestamp
- val metricRecorder = metricsExtension.register(TraceMetrics(name), TraceMetrics.Factory)
-
- metricRecorder.map { traceMetrics ⇒
- traceMetrics.elapsedTime.record(elapsedNanoTime)
- drainFinishedSegments(traceMetrics)
- }
- }
-
- def startSegment(segmentName: String, category: String, library: String): Segment = new DefaultSegment(segmentName, category, library)
+ def isOpen: Boolean = false
- @tailrec private def drainFinishedSegments(metricRecorder: TraceMetricRecorder): Unit = {
- val segment = finishedSegments.poll()
- if (segment != null) {
- metricRecorder.segmentRecorder(segment.identity).record(segment.duration)
- drainFinishedSegments(metricRecorder)
- }
- }
-
- private def finishSegment(segmentName: String, category: String, library: String, duration: Long): Unit = {
- finishedSegments.add(SegmentData(SegmentMetricIdentity(segmentName, category, library), duration))
-
- if (isClosed) {
- metricsExtension.register(TraceMetrics(name), TraceMetrics.Factory).map { traceMetrics ⇒
- drainFinishedSegments(traceMetrics)
- }
- }
- }
-
- class DefaultSegment(segmentName: String, val category: String, val library: String) extends Segment {
- private val _segmentStartNanoTime = System.nanoTime()
- @volatile private var _segmentName = segmentName
- @volatile private var _isOpen = true
-
- def name: String = _segmentName
- def rename(newName: String): Unit = _segmentName = newName
- def isEmpty: Boolean = false
-
- def finish: Unit = {
- val segmentFinishNanoTime = System.nanoTime()
- finishSegment(name, category, library, (segmentFinishNanoTime - _segmentStartNanoTime))
- }
+ def finish: Unit = {}
+ def rename(newName: String): Unit = {}
+ def addMetadata(key: String, value: String): Unit = {}
}
}
-case class SegmentMetricIdentity(name: String, category: String, library: String) extends MetricIdentity
-case class SegmentData(identity: SegmentMetricIdentity, duration: Long)
-
object SegmentCategory {
val HttpClient = "http-client"
+ val Database = "database"
+}
+
+class LOD private[trace] (val level: Int) extends AnyVal
+object LOD {
+ val MetricsOnly = new LOD(1)
+ val SimpleTrace = new LOD(2)
}
sealed trait LevelOfDetail
object LevelOfDetail {
- case object OnlyMetrics extends LevelOfDetail
+ case object MetricsOnly extends LevelOfDetail
case object SimpleTrace extends LevelOfDetail
case object FullTrace extends LevelOfDetail
}
-sealed trait TraceContextOrigin
-object TraceContextOrigin {
- case object Local extends TraceContextOrigin
- case object Remote extends TraceContextOrigin
-}
-
trait TraceContextAware extends Serializable {
def traceContext: TraceContext
}
@@ -165,7 +132,7 @@ object TraceContextAware {
def default: TraceContextAware = new DefaultTraceContextAware
class DefaultTraceContextAware extends TraceContextAware {
- @transient val traceContext = TraceRecorder.currentContext
+ @transient val traceContext = TraceContext.currentContext
//
// Beware of this hack, it might bite us in the future!
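To illustrate the new companion-object API, a hedged sketch where `tracer` stands for a TracerExtension instance and is an assumption here:

val context = tracer.newContext("user-lookup") // assumed TracerExtension
TraceContext.withContext(context) {
  // Inside the block the thread-local current context is `context`.
  val segment = TraceContext.currentContext.startSegment("find-user", SegmentCategory.Database, "jdbc")
  segment.finish()
}
// The previous context (EmptyTraceContext by default) is restored here.
context.finish()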
diff --git a/kamon-core/src/main/scala/kamon/trace/TraceExtension.scala b/kamon-core/src/main/scala/kamon/trace/TraceExtension.scala
deleted file mode 100644
index a59abc18..00000000
--- a/kamon-core/src/main/scala/kamon/trace/TraceExtension.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.trace
-
-import akka.actor.{ ExtendedActorSystem, ExtensionIdProvider, ExtensionId }
-import akka.actor
-import kamon.util.GlobPathFilter
-import kamon.Kamon
-
-class TraceExtension(system: ExtendedActorSystem) extends Kamon.Extension {
- val config = system.settings.config.getConfig("kamon.trace")
- val enableAskPatternTracing = config.getBoolean("ask-pattern-tracing")
-}
-
-object Trace extends ExtensionId[TraceExtension] with ExtensionIdProvider {
- def lookup(): ExtensionId[_ <: actor.Extension] = Trace
- def createExtension(system: ExtendedActorSystem): TraceExtension = new TraceExtension(system)
-
- case class MetricGroupFilter(includes: List[GlobPathFilter], excludes: List[GlobPathFilter]) {
- def accept(name: String): Boolean = includes.exists(_.accept(name)) && !excludes.exists(_.accept(name))
- }
-}
diff --git a/kamon-core/src/main/scala/kamon/trace/TraceLocal.scala b/kamon-core/src/main/scala/kamon/trace/TraceLocal.scala
index 0766af74..057f564e 100644
--- a/kamon-core/src/main/scala/kamon/trace/TraceLocal.scala
+++ b/kamon-core/src/main/scala/kamon/trace/TraceLocal.scala
@@ -16,23 +16,43 @@
package kamon.trace
-import scala.collection.concurrent.TrieMap
import kamon.trace.TraceLocal.TraceLocalKey
+import scala.collection.concurrent.TrieMap
+
object TraceLocal {
+
trait TraceLocalKey {
type ValueType
}
- def store(key: TraceLocalKey)(value: key.ValueType): Unit = TraceRecorder.currentContext match {
- case ctx: DefaultTraceContext ⇒ ctx.traceLocalStorage.store(key)(value)
- case EmptyTraceContext ⇒ // Can't store in the empty context.
+ trait AvailableToMdc extends TraceLocalKey {
+ override type ValueType = String
+ def mdcKey: String
+ }
+
+ object AvailableToMdc {
+ case class DefaultKeyAvailableToMdc(mdcKey: String) extends AvailableToMdc
+
+ def fromKey(mdcKey: String): AvailableToMdc = DefaultKeyAvailableToMdc(mdcKey)
+ def apply(mdcKey: String): AvailableToMdc = fromKey(mdcKey)
+ }
+
+ case class HttpContext(agent: String, uri: String, xforwarded: String)
+
+ object HttpContextKey extends TraceLocal.TraceLocalKey { type ValueType = HttpContext }
+
+ def store(key: TraceLocalKey)(value: key.ValueType): Unit = TraceContext.currentContext match {
+ case ctx: MetricsOnlyContext ⇒ ctx.traceLocalStorage.store(key)(value)
+ case EmptyTraceContext ⇒ // Can't store in the empty context.
}
- def retrieve(key: TraceLocalKey): Option[key.ValueType] = TraceRecorder.currentContext match {
- case ctx: DefaultTraceContext ⇒ ctx.traceLocalStorage.retrieve(key)
- case EmptyTraceContext ⇒ None // Can't retrieve anything from the empty context.
+ def retrieve(key: TraceLocalKey): Option[key.ValueType] = TraceContext.currentContext match {
+ case ctx: MetricsOnlyContext ⇒ ctx.traceLocalStorage.retrieve(key)
+ case EmptyTraceContext ⇒ None // Can't retrieve anything from the empty context.
}
+
+ def storeForMdc(key: String, value: String): Unit = store(AvailableToMdc.fromKey(key))(value)
}
class TraceLocalStorage {
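A usage sketch for the extended TraceLocal API, assuming a non-empty current TraceContext; the key below is illustrative:

import kamon.trace.TraceLocal
import kamon.trace.TraceLocal.TraceLocalKey

object RequestIdKey extends TraceLocalKey { type ValueType = String } // illustrative key

TraceLocal.store(RequestIdKey)("req-42")
val requestId: Option[String] = TraceLocal.retrieve(RequestIdKey)

// storeForMdc exposes the value to logging frameworks under the MDC key "requestId".
TraceLocal.storeForMdc("requestId", "req-42")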
diff --git a/kamon-core/src/main/scala/kamon/trace/TraceRecorder.scala b/kamon-core/src/main/scala/kamon/trace/TraceRecorder.scala
deleted file mode 100644
index 8da187cb..00000000
--- a/kamon-core/src/main/scala/kamon/trace/TraceRecorder.scala
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.trace
-
-import scala.language.experimental.macros
-import java.util.concurrent.atomic.AtomicLong
-import kamon.macros.InlineTraceContextMacro
-
-import scala.util.Try
-import java.net.InetAddress
-import akka.actor.ActorSystem
-
-object TraceRecorder {
- private val traceContextStorage = new ThreadLocal[TraceContext] {
- override def initialValue(): TraceContext = EmptyTraceContext
- }
-
- private val tokenCounter = new AtomicLong
- private val hostnamePrefix = Try(InetAddress.getLocalHost.getHostName).getOrElse("unknown-localhost")
-
- def newToken: String = hostnamePrefix + "-" + String.valueOf(tokenCounter.incrementAndGet())
-
- private def newTraceContext(name: String, token: Option[String], system: ActorSystem): TraceContext = {
- new DefaultTraceContext(
- name,
- token.getOrElse(newToken),
- izOpen = true,
- LevelOfDetail.OnlyMetrics,
- TraceContextOrigin.Local,
- nanoTimeztamp = System.nanoTime,
- system)
- }
-
- def joinRemoteTraceContext(traceName: String, traceToken: String, startMilliTime: Long, isOpen: Boolean, system: ActorSystem): TraceContext = {
- val equivalentNanotime = System.nanoTime() - ((System.currentTimeMillis() - startMilliTime) * 1000000)
- new DefaultTraceContext(
- traceName,
- traceToken,
- isOpen,
- LevelOfDetail.OnlyMetrics,
- TraceContextOrigin.Remote,
- equivalentNanotime,
- system)
- }
-
- def setContext(context: TraceContext): Unit = traceContextStorage.set(context)
-
- def clearContext: Unit = traceContextStorage.set(EmptyTraceContext)
-
- def currentContext: TraceContext = traceContextStorage.get()
-
- def start(name: String, token: Option[String] = None)(implicit system: ActorSystem) = {
- val ctx = newTraceContext(name, token, system)
- traceContextStorage.set(ctx)
- }
-
- def rename(name: String): Unit = currentContext.rename(name)
-
- def withNewTraceContext[T](name: String, token: Option[String] = None)(thunk: ⇒ T)(implicit system: ActorSystem): T =
- withTraceContext(newTraceContext(name, token, system))(thunk)
-
- def withTraceContext[T](context: TraceContext)(thunk: ⇒ T): T = {
- val oldContext = currentContext
- setContext(context)
-
- try thunk finally setContext(oldContext)
- }
-
- def withTraceContextAndSystem[T](thunk: (TraceContext, ActorSystem) ⇒ T): Option[T] = currentContext match {
- case ctx: DefaultTraceContext ⇒ Some(thunk(ctx, ctx.system))
- case EmptyTraceContext ⇒ None
- }
-
- def withInlineTraceContextReplacement[T](traceCtx: TraceContext)(thunk: ⇒ T): T = macro InlineTraceContextMacro.withInlineTraceContextImpl[T, TraceContext]
-
- def finish(): Unit = currentContext.finish()
-
-}
diff --git a/kamon-core/src/main/scala/kamon/trace/TraceSubscriptions.scala b/kamon-core/src/main/scala/kamon/trace/TraceSubscriptions.scala
new file mode 100644
index 00000000..f2da404c
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/trace/TraceSubscriptions.scala
@@ -0,0 +1,45 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.trace
+
+import akka.actor.{ Terminated, ActorRef, Actor }
+
+class TraceSubscriptions extends Actor {
+ import TraceSubscriptions._
+
+ var subscribers: List[ActorRef] = Nil
+
+ def receive = {
+ case Subscribe(newSubscriber) ⇒
+ if (!subscribers.contains(newSubscriber))
+ subscribers = context.watch(newSubscriber) :: subscribers
+
+ case Unsubscribe(leavingSubscriber) ⇒
+ subscribers = subscribers.filterNot(_ == leavingSubscriber)
+
+ case Terminated(terminatedSubscriber) ⇒
+ subscribers = subscribers.filterNot(_ == terminatedSubscriber)
+
+ case trace: TraceInfo ⇒
+ subscribers.foreach(_ ! trace)
+ }
+}
+
+object TraceSubscriptions {
+ case class Subscribe(subscriber: ActorRef)
+ case class Unsubscribe(subscriber: ActorRef)
+}
diff --git a/kamon-core/src/main/scala/kamon/trace/TracerExtension.scala b/kamon-core/src/main/scala/kamon/trace/TracerExtension.scala
new file mode 100644
index 00000000..be565154
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/trace/TracerExtension.scala
@@ -0,0 +1,110 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.trace
+
+import java.net.InetAddress
+import java.util.concurrent.atomic.AtomicLong
+
+import akka.actor._
+import com.typesafe.config.Config
+import kamon.metric.MetricsExtension
+import kamon.util._
+
+import scala.util.Try
+
+trait TracerExtension {
+ def newContext(name: String): TraceContext
+ def newContext(name: String, token: String): TraceContext
+ def newContext(name: String, token: String, timestamp: RelativeNanoTimestamp, isOpen: Boolean, isLocal: Boolean): TraceContext
+
+ def subscribe(subscriber: ActorRef): Unit
+ def unsubscribe(subscriber: ActorRef): Unit
+}
+
+private[kamon] class TracerExtensionImpl(metricsExtension: MetricsExtension, config: Config) extends TracerExtension {
+ private val _settings = TraceSettings(config)
+ private val _hostnamePrefix = Try(InetAddress.getLocalHost.getHostName).getOrElse("unknown-localhost")
+ private val _tokenCounter = new AtomicLong
+
+ private val _subscriptions = new LazyActorRef
+ private val _incubator = new LazyActorRef
+
+ private def newToken: String =
+ _hostnamePrefix + "-" + String.valueOf(_tokenCounter.incrementAndGet())
+
+ def newContext(name: String): TraceContext =
+ createTraceContext(name)
+
+ def newContext(name: String, token: String): TraceContext =
+ createTraceContext(name, token)
+
+ def newContext(name: String, token: String, timestamp: RelativeNanoTimestamp, isOpen: Boolean, isLocal: Boolean): TraceContext =
+ createTraceContext(name, token, timestamp, isOpen, isLocal)
+
+ private def createTraceContext(traceName: String, token: String = newToken, startTimestamp: RelativeNanoTimestamp = RelativeNanoTimestamp.now,
+ isOpen: Boolean = true, isLocal: Boolean = true): TraceContext = {
+
+ def newMetricsOnlyContext = new MetricsOnlyContext(traceName, token, isOpen, _settings.levelOfDetail, startTimestamp, null, metricsExtension)
+
+ if (_settings.levelOfDetail == LevelOfDetail.MetricsOnly || !isLocal)
+ newMetricsOnlyContext
+ else {
+ if (!_settings.sampler.shouldTrace)
+ newMetricsOnlyContext
+ else
+ new TracingContext(traceName, token, true, _settings.levelOfDetail, isLocal, startTimestamp, null, metricsExtension, this, dispatchTracingContext)
+ }
+ }
+
+ def subscribe(subscriber: ActorRef): Unit =
+ _subscriptions.tell(TraceSubscriptions.Subscribe(subscriber))
+
+ def unsubscribe(subscriber: ActorRef): Unit =
+ _subscriptions.tell(TraceSubscriptions.Unsubscribe(subscriber))
+
+ private[kamon] def dispatchTracingContext(trace: TracingContext): Unit =
+ if (_settings.sampler.shouldReport(trace.elapsedTime))
+ if (trace.shouldIncubate)
+ _incubator.tell(trace)
+ else
+ _subscriptions.tell(trace.generateTraceInfo)
+
+ /**
+ * Tracer Extension initialization.
+ */
+ private var _system: ActorSystem = null
+ private lazy val _start = {
+ val subscriptions = _system.actorOf(Props[TraceSubscriptions], "trace-subscriptions")
+ _subscriptions.point(subscriptions)
+ _incubator.point(_system.actorOf(Incubator.props(subscriptions)))
+ }
+
+ def start(system: ActorSystem): Unit = synchronized {
+ _system = system
+ _start
+ _system = null
+ }
+}
+
+private[kamon] object TracerExtensionImpl {
+
+ def apply(metricsExtension: MetricsExtension, config: Config) =
+ new TracerExtensionImpl(metricsExtension, config)
+}
+
+case class TraceInfo(name: String, token: String, timestamp: NanoTimestamp, elapsedTime: NanoInterval, metadata: Map[String, String], segments: List[SegmentInfo])
+case class SegmentInfo(name: String, category: String, library: String, timestamp: NanoTimestamp, elapsedTime: NanoInterval, metadata: Map[String, String]) \ No newline at end of file
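A sketch of consuming the TraceInfo stream through the subscription API; the printing actor is illustrative, and `tracer` and `system` are assumed references:

import akka.actor.{ Actor, Props }

class TraceInfoPrinter extends Actor {
  def receive = {
    case trace: TraceInfo ⇒
      println(s"trace [${trace.name}] with token [${trace.token}] took ${trace.elapsedTime.nanos} ns")
  }
}

// `tracer` stands for the TracerExtensionImpl created during Kamon's startup.
tracer.subscribe(system.actorOf(Props[TraceInfoPrinter], "trace-printer"))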
diff --git a/kamon-core/src/main/scala/kamon/trace/TracerExtensionSettings.scala b/kamon-core/src/main/scala/kamon/trace/TracerExtensionSettings.scala
new file mode 100644
index 00000000..79f30f23
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/trace/TracerExtensionSettings.scala
@@ -0,0 +1,46 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.trace
+
+import com.typesafe.config.Config
+
+case class TraceSettings(levelOfDetail: LevelOfDetail, sampler: Sampler)
+
+object TraceSettings {
+ import kamon.util.ConfigTools.Syntax
+
+ def apply(config: Config): TraceSettings = {
+ val tracerConfig = config.getConfig("kamon.trace")
+
+ val detailLevel: LevelOfDetail = tracerConfig.getString("level-of-detail") match {
+ case "metrics-only" ⇒ LevelOfDetail.MetricsOnly
+ case "simple-trace" ⇒ LevelOfDetail.SimpleTrace
+ case other ⇒ sys.error(s"Unknown tracer level of detail [$other] present in the configuration file.")
+ }
+
+ val sampler: Sampler =
+ if (detailLevel == LevelOfDetail.MetricsOnly) NoSampling
+ else tracerConfig.getString("sampling") match {
+ case "all" ⇒ SampleAll
+ case "random" ⇒ new RandomSampler(tracerConfig.getInt("random-sampler.chance"))
+ case "ordered" ⇒ new OrderedSampler(tracerConfig.getInt("ordered-sampler.interval"))
+ case "threshold" ⇒ new ThresholdSampler(tracerConfig.getFiniteDuration("threshold-sampler.minimum-elapsed-time"))
+ }
+
+ TraceSettings(detailLevel, sampler)
+ }
+} \ No newline at end of file
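The configuration shape read by TraceSettings.apply, as a sketch:

import com.typesafe.config.ConfigFactory

val config = ConfigFactory.parseString(
  """
    |kamon.trace {
    |  level-of-detail = simple-trace
    |  sampling = random
    |  random-sampler.chance = 10
    |}
  """.stripMargin).withFallback(ConfigFactory.load())

// Produces TraceSettings(LevelOfDetail.SimpleTrace, new RandomSampler(10)).
val settings = TraceSettings(config)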
diff --git a/kamon-core/src/main/scala/kamon/trace/TracingContext.scala b/kamon-core/src/main/scala/kamon/trace/TracingContext.scala
new file mode 100644
index 00000000..3d324886
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/trace/TracingContext.scala
@@ -0,0 +1,92 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.trace
+
+import java.util.concurrent.ConcurrentLinkedQueue
+import java.util.concurrent.atomic.AtomicInteger
+
+import akka.event.LoggingAdapter
+import kamon.util.{ NanoInterval, RelativeNanoTimestamp, NanoTimestamp }
+import kamon.metric.MetricsExtension
+
+import scala.collection.concurrent.TrieMap
+
+private[trace] class TracingContext(traceName: String, token: String, izOpen: Boolean, levelOfDetail: LevelOfDetail,
+ isLocal: Boolean, startTimeztamp: RelativeNanoTimestamp, log: LoggingAdapter, metricsExtension: MetricsExtension,
+ traceExtension: TracerExtensionImpl, traceInfoSink: TracingContext ⇒ Unit)
+ extends MetricsOnlyContext(traceName, token, izOpen, levelOfDetail, startTimeztamp, log, metricsExtension) {
+
+ private val _openSegments = new AtomicInteger(0)
+ private val _startTimestamp = NanoTimestamp.now
+ private val _allSegments = new ConcurrentLinkedQueue[TracingSegment]()
+ private val _metadata = TrieMap.empty[String, String]
+
+ override def addMetadata(key: String, value: String): Unit = _metadata.put(key, value)
+
+ override def startSegment(segmentName: String, category: String, library: String): Segment = {
+ _openSegments.incrementAndGet()
+ val newSegment = new TracingSegment(segmentName, category, library)
+ _allSegments.add(newSegment)
+ newSegment
+ }
+
+ override def finish(): Unit = {
+ super.finish()
+ traceInfoSink(this)
+ }
+
+ override def finishSegment(segmentName: String, category: String, library: String, duration: NanoInterval): Unit = {
+ _openSegments.decrementAndGet()
+ super.finishSegment(segmentName, category, library, duration)
+ }
+
+ def shouldIncubate: Boolean = isOpen || _openSegments.get() > 0
+
+ // Handle with care, should only be used after a trace is finished.
+ def generateTraceInfo: TraceInfo = {
+ require(isClosed, "Can't generated a TraceInfo if the Trace has not closed yet.")
+
+ val currentSegments = _allSegments.iterator()
+ var segmentsInfo = List.newBuilder[SegmentInfo]
+
+ while (currentSegments.hasNext()) {
+ val segment = currentSegments.next()
+ if (segment.isClosed)
+ segmentsInfo += segment.createSegmentInfo(_startTimestamp, startTimestamp)
+ else
+ log.warning("Segment [{}] will be left out of TraceInfo because it was still open.", segment.name)
+ }
+
+ TraceInfo(name, token, _startTimestamp, elapsedTime, _metadata.toMap, segmentsInfo.result())
+ }
+
+ class TracingSegment(segmentName: String, category: String, library: String) extends MetricsOnlySegment(segmentName, category, library) {
+ private val metadata = TrieMap.empty[String, String]
+ override def addMetadata(key: String, value: String): Unit = metadata.put(key, value)
+
+ // Handle with care, should only be used after the segment has finished.
+ def createSegmentInfo(traceStartTimestamp: NanoTimestamp, traceRelativeTimestamp: RelativeNanoTimestamp): SegmentInfo = {
+ require(isClosed, "Can't generated a SegmentInfo if the Segment has not closed yet.")
+
+ // We don't have an epoch-based timestamp for the segments because calling System.currentTimeMillis() is both
+ // expensive and inaccurate, but we can do that once for the trace and calculate all the segments relative to it.
+ val segmentStartTimestamp = new NanoTimestamp((this.startTimestamp.nanos - traceRelativeTimestamp.nanos) + traceStartTimestamp.nanos)
+
+ SegmentInfo(this.name, category, library, segmentStartTimestamp, this.elapsedTime, metadata.toMap)
+ }
+ }
+} \ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/trace/logging/LogbackTraceTokenConverter.scala b/kamon-core/src/main/scala/kamon/trace/logging/LogbackTraceTokenConverter.scala
index f052f009..961c3099 100644
--- a/kamon-core/src/main/scala/kamon/trace/logging/LogbackTraceTokenConverter.scala
+++ b/kamon-core/src/main/scala/kamon/trace/logging/LogbackTraceTokenConverter.scala
@@ -17,11 +17,11 @@ package kamon.trace.logging
import ch.qos.logback.classic.pattern.ClassicConverter
import ch.qos.logback.classic.spi.ILoggingEvent
-import kamon.trace.TraceRecorder
+import kamon.trace.TraceContext
class LogbackTraceTokenConverter extends ClassicConverter {
def convert(event: ILoggingEvent): String = {
- val ctx = TraceRecorder.currentContext
+ val ctx = TraceContext.currentContext
if (ctx.isEmpty)
"undefined"
else
diff --git a/kamon-core/src/main/scala/kamon/trace/logging/MdcKeysSupport.scala b/kamon-core/src/main/scala/kamon/trace/logging/MdcKeysSupport.scala
new file mode 100644
index 00000000..4970d97e
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/trace/logging/MdcKeysSupport.scala
@@ -0,0 +1,39 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.trace.logging
+
+import kamon.trace.TraceLocal.AvailableToMdc
+import kamon.trace.{ EmptyTraceContext, MetricsOnlyContext, TraceContext }
+
+import org.slf4j.MDC
+
+trait MdcKeysSupport {
+
+ def withMdc[A](thunk: ⇒ A): A = {
+ val keys = copyToMdc(TraceContext.currentContext)
+ try thunk finally keys.foreach(key ⇒ MDC.remove(key))
+ }
+
+ private[this] def copyToMdc(traceContext: TraceContext): Iterable[String] = traceContext match {
+ case ctx: MetricsOnlyContext ⇒
+ ctx.traceLocalStorage.underlyingStorage.collect {
+ case (available: AvailableToMdc, value) ⇒ Map(available.mdcKey -> String.valueOf(value))
+ }.map { value ⇒ value.map { case (k, v) ⇒ MDC.put(k, v); k } }.flatten
+
+ case EmptyTraceContext ⇒ Iterable.empty[String]
+ }
+}
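A logging sketch for MdcKeysSupport, assuming a non-empty current TraceContext and an slf4j logger; the logger name and MDC key are illustrative:

import kamon.trace.TraceLocal
import org.slf4j.LoggerFactory

object ExampleLogging extends MdcKeysSupport {
  private val log = LoggerFactory.getLogger("example")

  def logWithTraceKeys(): Unit = {
    TraceLocal.storeForMdc("requestId", "req-42")
    withMdc {
      // "requestId" is now available as %X{requestId} in the logging pattern.
      log.info("processing request")
    } // the MDC keys copied by withMdc are removed again here
  }
}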
diff --git a/kamon-core/src/main/scala/kamon/util/ConfigTools.scala b/kamon-core/src/main/scala/kamon/util/ConfigTools.scala
new file mode 100644
index 00000000..483278bf
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/util/ConfigTools.scala
@@ -0,0 +1,42 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.util
+
+import java.util.concurrent.TimeUnit
+
+import com.typesafe.config.Config
+
+import scala.concurrent.duration.FiniteDuration
+
+object ConfigTools {
+ implicit class Syntax(val config: Config) extends AnyVal {
+ // We are using the deprecated .getNanoseconds option to keep Kamon source code compatible with
+ // versions of Akka using older typesafe-config versions.
+
+ def getFiniteDuration(path: String): FiniteDuration =
+ FiniteDuration(config.getNanoseconds(path), TimeUnit.NANOSECONDS)
+
+ def firstLevelKeys: Set[String] = {
+ import scala.collection.JavaConverters._
+
+ config.entrySet().asScala.map {
+ case entry ⇒ entry.getKey.takeWhile(_ != '.')
+ } toSet
+ }
+ }
+
+}
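A quick sketch of the syntax this adds; the keys are illustrative:

import com.typesafe.config.ConfigFactory
import kamon.util.ConfigTools.Syntax

val config = ConfigFactory.parseString("kamon.trace.incubator.check-interval = 100 millis")

val interval = config.getFiniteDuration("kamon.trace.incubator.check-interval") // 100 milliseconds
val roots = config.firstLevelKeys // Set("kamon")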
diff --git a/kamon-core/src/main/scala/kamon/util/FastDispatch.scala b/kamon-core/src/main/scala/kamon/util/FastDispatch.scala
new file mode 100644
index 00000000..d2748847
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/util/FastDispatch.scala
@@ -0,0 +1,38 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.util
+
+import akka.actor.ActorRef
+
+import scala.concurrent.{ ExecutionContext, Future }
+
+/**
+ * Extension for Future[ActorRef]. Tries to dispatch a message to a Future[ActorRef] in the same thread if it has
+ * already completed, or falls back to regular scheduling otherwise. Especially useful when using the ModuleSupervisor
+ * extension to create actors.
+ */
+object FastDispatch {
+ implicit class Syntax(val target: Future[ActorRef]) extends AnyVal {
+
+ def fastDispatch(message: Any)(implicit ec: ExecutionContext): Unit =
+ if (target.isCompleted)
+ target.value.get.map(_ ! message)
+ else
+ target.map(_ ! message)
+ }
+
+}
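FastDispatch in use, as a sketch; the module creation call, Props value, and message are assumptions:

import akka.actor.ActorRef
import kamon.util.FastDispatch.Syntax
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

val module: Future[ActorRef] = moduleSupervisor.createModule("my-module", myProps) // assumed
// Delivered on the calling thread if the Future has already completed,
// otherwise scheduled on the execution context.
module.fastDispatch("tick")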
diff --git a/kamon-core/src/main/scala/kamon/util/LazyActorRef.scala b/kamon-core/src/main/scala/kamon/util/LazyActorRef.scala
new file mode 100644
index 00000000..855bf1fc
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/util/LazyActorRef.scala
@@ -0,0 +1,53 @@
+package kamon.util
+
+import java.util
+import java.util.concurrent.ConcurrentLinkedQueue
+
+import akka.actor.{ Actor, ActorRef }
+import org.HdrHistogram.WriterReaderPhaser
+
+import scala.annotation.tailrec
+
+/**
+ * A LazyActorRef accumulates messages sent to an actor that doesn't exist yet. Once the actor is created and
+ * the LazyActorRef is pointed to it, all the accumulated messages are flushed and any new message sent to the
+ * LazyActorRef will immediately be sent to the pointed ActorRef.
+ *
+ * This is intended to be used during Kamon's initialization where some components need to use ActorRefs to work
+ * (like subscriptions and the trace incubator) but our internal ActorSystem is not yet ready to create the
+ * required actors.
+ */
+class LazyActorRef {
+ private val _refPhaser = new WriterReaderPhaser
+ private val _backlog = new ConcurrentLinkedQueue[(Any, ActorRef)]()
+ @volatile private var _target: Option[ActorRef] = None
+
+ def tell(message: Any)(implicit sender: ActorRef = Actor.noSender): Unit = {
+ val criticalEnter = _refPhaser.writerCriticalSectionEnter()
+ try {
+ _target.map(_.tell(message, sender)) getOrElse {
+ _backlog.add((message, sender))
+ }
+
+ } finally { _refPhaser.writerCriticalSectionExit(criticalEnter) }
+ }
+
+ def point(target: ActorRef): Unit = {
+ @tailrec def drain(q: util.Queue[(Any, ActorRef)]): Unit = if (!q.isEmpty) {
+ val (msg, sender) = q.poll()
+ target.tell(msg, sender)
+ drain(q)
+ }
+
+ try {
+ _refPhaser.readerLock()
+
+ if (_target.isEmpty) {
+ _target = Some(target)
+ _refPhaser.flipPhase(1000L)
+ drain(_backlog)
+
+ } else sys.error("A LazyActorRef cannot be pointed more than once.")
+ } finally { _refPhaser.readerUnlock() }
+ }
+}
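The LazyActorRef lifecycle, sketched; `system` is an assumed ActorSystem:

import akka.actor.Props

val lazyRef = new LazyActorRef
lazyRef.tell("buffered") // no target yet: goes into the backlog

val target = system.actorOf(Props[TraceSubscriptions], "subscriptions")
lazyRef.point(target) // flushes "buffered" to the target, in order

lazyRef.tell("direct") // delivered immediately from now on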
diff --git a/kamon-core/src/main/scala/kamon/util/MapMerge.scala b/kamon-core/src/main/scala/kamon/util/MapMerge.scala
new file mode 100644
index 00000000..64b4f7ae
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/util/MapMerge.scala
@@ -0,0 +1,43 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.util
+
+object MapMerge {
+
+ /**
+ * Merge two immutable maps with the same key and value types, using the provided valueMerge function.
+ */
+ implicit class Syntax[K, V](val map: Map[K, V]) extends AnyVal {
+ def merge(that: Map[K, V], valueMerge: (V, V) ⇒ V): Map[K, V] = {
+ val merged = Map.newBuilder[K, V]
+
+ map.foreach {
+ case (key, value) ⇒
+ val mergedValue = that.get(key).map(v ⇒ valueMerge(value, v)).getOrElse(value)
+ merged += key -> mergedValue
+ }
+
+ that.foreach {
+ case kv @ (key, _) if !map.contains(key) ⇒ merged += kv
+ case other ⇒ // ignore, already included.
+ }
+
+ merged.result()
+ }
+ }
+
+}
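A worked example of the merge semantics: keys from both maps are kept and collisions are combined with valueMerge.

import kamon.util.MapMerge.Syntax

val left = Map("requests" -> 10L, "errors" -> 1L)
val right = Map("requests" -> 5L, "timeouts" -> 2L)

val merged = left.merge(right, _ + _)
// Map("requests" -> 15, "errors" -> 1, "timeouts" -> 2)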
diff --git a/kamon-core/src/main/scala/kamon/util/PaddedAtomicLong.scala b/kamon-core/src/main/scala/kamon/util/PaddedAtomicLong.scala
index 9c926372..9019eb4c 100644
--- a/kamon-core/src/main/scala/kamon/util/PaddedAtomicLong.scala
+++ b/kamon-core/src/main/scala/kamon/util/PaddedAtomicLong.scala
@@ -1,4 +1,3 @@
-package kamon.util
/*
* =========================================================================================
* Copyright © 2013-2014 the kamon project <http://kamon.io/>
@@ -15,6 +14,8 @@ package kamon.util
* =========================================================================================
*/
+package kamon.util
+
import java.util.concurrent.atomic.AtomicLong
class PaddedAtomicLong(value: Long = 0) extends AtomicLong(value) {
diff --git a/kamon-core/src/main/scala/kamon/util/Sequencer.scala b/kamon-core/src/main/scala/kamon/util/Sequencer.scala
new file mode 100644
index 00000000..f368e54f
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/util/Sequencer.scala
@@ -0,0 +1,40 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.util
+
+/**
+ * This class implements an extremely efficient, thread-safe way to generate an
+ * incrementing sequence of Longs with simple Long overflow protection.
+ */
+class Sequencer {
+ private val CloseToOverflow = Long.MaxValue - 1000000000
+ private val sequenceNumber = new PaddedAtomicLong(1L)
+
+ def next(): Long = {
+ val current = sequenceNumber.getAndIncrement
+
+ // reset the counter if this value is getting close to overflow
+ if (current > CloseToOverflow) {
+ sequenceNumber.set(current - CloseToOverflow) // we need to maintain the order
+ }
+ current
+ }
+}
+
+object Sequencer {
+ def apply(): Sequencer = new Sequencer()
+}
\ No newline at end of file
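
A short sketch (not part of the patch) of the Sequencer in use; between overflow resets the values handed out are strictly increasing.

import kamon.util.Sequencer

object SequencerExample extends App {
  val sequencer = Sequencer()

  val first = sequencer.next() // 1
  val second = sequencer.next() // 2
  assert(second > first) // strictly increasing between resets
}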
diff --git a/kamon-core/src/main/scala/kamon/util/Timestamp.scala b/kamon-core/src/main/scala/kamon/util/Timestamp.scala
new file mode 100644
index 00000000..eadc6690
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/util/Timestamp.scala
@@ -0,0 +1,101 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.util
+
+/**
+ * Epoch time stamp in seconds.
+ */
+class Timestamp(val seconds: Long) extends AnyVal {
+ def <(that: Timestamp): Boolean = this.seconds < that.seconds
+ def >(that: Timestamp): Boolean = this.seconds > that.seconds
+ def ==(that: Timestamp): Boolean = this.seconds == that.seconds
+ def >=(that: Timestamp): Boolean = this.seconds >= that.seconds
+ def <=(that: Timestamp): Boolean = this.seconds <= that.seconds
+
+ override def toString: String = String.valueOf(seconds) + ".seconds"
+}
+
+object Timestamp {
+ def now: Timestamp = new Timestamp(System.currentTimeMillis() / 1000)
+ def earlier(l: Timestamp, r: Timestamp): Timestamp = if (l <= r) l else r
+ def later(l: Timestamp, r: Timestamp): Timestamp = if (l >= r) l else r
+}
+
+/**
+ * Epoch time stamp in milliseconds.
+ */
+class MilliTimestamp(val millis: Long) extends AnyVal {
+ override def toString: String = String.valueOf(millis) + ".millis"
+
+ def toTimestamp: Timestamp = new Timestamp(millis / 1000)
+ def toRelativeNanoTimestamp: RelativeNanoTimestamp = {
+ val diff = (System.currentTimeMillis() - millis) * 1000000
+ new RelativeNanoTimestamp(System.nanoTime() - diff)
+ }
+}
+
+object MilliTimestamp {
+ def now: MilliTimestamp = new MilliTimestamp(System.currentTimeMillis())
+}
+
+/**
+ * Epoch time stamp in nanoseconds.
+ *
+ * NOTE: This doesn't have any better precision than MilliTimestamp; it is just a convenient way to get an epoch
+ * timestamp in nanoseconds.
+ */
+class NanoTimestamp(val nanos: Long) extends AnyVal {
+ override def toString: String = String.valueOf(nanos) + ".nanos"
+}
+
+object NanoTimestamp {
+ def now: NanoTimestamp = new NanoTimestamp(System.currentTimeMillis() * 1000000)
+}
+
+/**
+ * Number of nanoseconds since the arbitrary origin timestamp provided by the JVM via System.nanoTime().
+ */
+class RelativeNanoTimestamp(val nanos: Long) extends AnyVal {
+ override def toString: String = String.valueOf(nanos) + ".nanos"
+
+ def toMilliTimestamp: MilliTimestamp =
+ new MilliTimestamp(System.currentTimeMillis - ((System.nanoTime - nanos) / 1000000))
+}
+
+object RelativeNanoTimestamp {
+ def now: RelativeNanoTimestamp = new RelativeNanoTimestamp(System.nanoTime())
+ def relativeTo(milliTimestamp: MilliTimestamp): RelativeNanoTimestamp =
+ new RelativeNanoTimestamp(now.nanos - (MilliTimestamp.now.millis - milliTimestamp.millis) * 1000000)
+}
+
+/**
+ * Number of nanoseconds that passed between two points in time.
+ */
+class NanoInterval(val nanos: Long) extends AnyVal {
+ def <(that: NanoInterval): Boolean = this.nanos < that.nanos
+ def >(that: NanoInterval): Boolean = this.nanos > that.nanos
+ def ==(that: NanoInterval): Boolean = this.nanos == that.nanos
+ def >=(that: NanoInterval): Boolean = this.nanos >= that.nanos
+ def <=(that: NanoInterval): Boolean = this.nanos <= that.nanos
+
+ override def toString: String = String.valueOf(nanos) + ".nanos"
+}
+
+object NanoInterval {
+ def default: NanoInterval = new NanoInterval(0L)
+ def since(relative: RelativeNanoTimestamp): NanoInterval = new NanoInterval(System.nanoTime() - relative.nanos)
+}
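
An illustrative sketch (not part of the patch) tying the types above together: RelativeNanoTimestamp and NanoInterval measure elapsed time against the System.nanoTime() origin, while a MilliTimestamp can be mapped onto that origin and back at millisecond precision.

import kamon.util.{ MilliTimestamp, NanoInterval, RelativeNanoTimestamp }

object TimestampExample extends App {
  val start = RelativeNanoTimestamp.now
  Thread.sleep(5)
  println(NanoInterval.since(start)) // e.g. 5123456.nanos

  // Map a wall-clock timestamp onto the nanoTime() origin and back.
  val wallClock = MilliTimestamp.now
  val relative = RelativeNanoTimestamp.relativeTo(wallClock)
  println(relative.toMilliTimestamp.millis - wallClock.millis) // ~0
}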
diff --git a/kamon-core/src/main/scala/kamon/util/TriemapAtomicGetOrElseUpdate.scala b/kamon-core/src/main/scala/kamon/util/TriemapAtomicGetOrElseUpdate.scala
new file mode 100644
index 00000000..d1197a5a
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/util/TriemapAtomicGetOrElseUpdate.scala
@@ -0,0 +1,45 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+package kamon.util
+
+import scala.collection.concurrent.TrieMap
+
+object TriemapAtomicGetOrElseUpdate {
+
+ /**
+ * Workaround for the non-thread-safe [[scala.collection.concurrent.TrieMap#getOrElseUpdate]] method. More details on
+ * why this is necessary can be found at [[https://issues.scala-lang.org/browse/SI-7943]].
+ */
+ implicit class Syntax[K, V](val trieMap: TrieMap[K, V]) extends AnyVal {
+
+ def atomicGetOrElseUpdate(key: K, op: ⇒ V): V =
+ atomicGetOrElseUpdate(key, op, { _: V ⇒ () })
+
+ def atomicGetOrElseUpdate(key: K, op: ⇒ V, cleanup: V ⇒ Unit): V =
+ trieMap.get(key) match {
+ case Some(v) ⇒ v
+ case None ⇒
+ val d = op
+ trieMap.putIfAbsent(key, d).map { oldValue ⇒
+ // If there was an old value then `d` was never added
+ // and thus needs to be cleaned up.
+ cleanup(d)
+ oldValue
+
+ } getOrElse (d)
+ }
+ }
+}
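
A sketch (not part of the patch) of the intended behaviour: sequential callers simply share the cached instance, while the cleanup function only runs for a value that loses a putIfAbsent race between concurrent callers.

import scala.collection.concurrent.TrieMap
import kamon.util.TriemapAtomicGetOrElseUpdate.Syntax

object AtomicGetOrElseUpdateExample extends App {
  val cache = TrieMap.empty[String, StringBuilder]

  val first = cache.atomicGetOrElseUpdate("key", new StringBuilder("fresh"))
  val second = cache.atomicGetOrElseUpdate("key", new StringBuilder("never used"),
    discarded ⇒ println(s"discarding racing value: $discarded"))

  assert(first eq second) // both callers observe the same cached instance
}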
diff --git a/kamon-core/src/test/resources/logback.xml b/kamon-core/src/test/resources/logback.xml
index 2ae1e3bd..dd623d61 100644
--- a/kamon-core/src/test/resources/logback.xml
+++ b/kamon-core/src/test/resources/logback.xml
@@ -1,12 +1,17 @@
<configuration scan="true">
- <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
- <encoder>
- <pattern>%date{HH:mm:ss.SSS} %-5level [%X{uow}][%X{requestId}] [%thread] %logger{55} - %msg%n</pattern>
- </encoder>
- </appender>
+ <contextListener class="ch.qos.logback.classic.jul.LevelChangePropagator">
+ <resetJUL>true</resetJUL>
+ </contextListener>
- <root level="debug">
- <appender-ref ref="STDOUT" />
- </root>
+ <conversionRule conversionWord="traceToken" converterClass="kamon.trace.logging.LogbackTraceTokenConverter"/>
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <encoder>
+ <pattern>%date{HH:mm:ss.SSS} %-5level [%traceToken][%thread] %logger{55} - %msg%n</pattern>
+ </encoder>
+ </appender>
+
+ <root level="error">
+ <appender-ref ref="STDOUT"/>
+ </root>
</configuration>
diff --git a/kamon-core/src/test/scala/kamon/instrumentation/akka/ActorLoggingInstrumentationSpec.scala b/kamon-core/src/test/scala/kamon/instrumentation/akka/ActorLoggingInstrumentationSpec.scala
deleted file mode 100644
index 3dab44bc..00000000
--- a/kamon-core/src/test/scala/kamon/instrumentation/akka/ActorLoggingInstrumentationSpec.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/* ===================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ========================================================== */
-package kamon.instrumentation.akka
-
-import akka.actor.{ Actor, ActorLogging, ActorSystem, Props }
-import akka.event.Logging.LogEvent
-import akka.testkit.TestKit
-import kamon.trace.{ TraceContextAware, TraceRecorder }
-import org.scalatest.{ Inspectors, Matchers, WordSpecLike }
-
-class ActorLoggingInstrumentationSpec extends TestKit(ActorSystem("actor-logging-instrumentation-spec")) with WordSpecLike
- with Matchers with Inspectors {
-
- "the ActorLogging instrumentation" should {
- "attach the TraceContext (if available) to log events" in {
- val loggerActor = system.actorOf(Props[LoggerActor])
- system.eventStream.subscribe(testActor, classOf[LogEvent])
-
- val testTraceContext = TraceRecorder.withNewTraceContext("logging") {
- loggerActor ! "info"
- TraceRecorder.currentContext
- }
-
- fishForMessage() {
- case event: LogEvent if event.message.toString contains "TraceContext =>" ⇒
- val ctxInEvent = event.asInstanceOf[TraceContextAware].traceContext
- ctxInEvent === testTraceContext
-
- case event: LogEvent ⇒ false
- }
- }
- }
-}
-
-class LoggerActor extends Actor with ActorLogging {
- def receive = {
- case "info" ⇒ log.info("TraceContext => {}", TraceRecorder.currentContext)
- }
-}
diff --git a/kamon-core/src/test/scala/kamon/instrumentation/akka/AskPatternInstrumentationSpec.scala b/kamon-core/src/test/scala/kamon/instrumentation/akka/AskPatternInstrumentationSpec.scala
deleted file mode 100644
index 17312ba3..00000000
--- a/kamon-core/src/test/scala/kamon/instrumentation/akka/AskPatternInstrumentationSpec.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.instrumentation.akka
-
-import akka.actor.{ Actor, ActorSystem, Props }
-import akka.event.Logging.Warning
-import akka.pattern.ask
-import akka.testkit.TestKitBase
-import akka.util.Timeout
-import com.typesafe.config.ConfigFactory
-import kamon.trace.{ TraceContextAware, TraceRecorder }
-import org.scalatest.{ Matchers, WordSpecLike }
-
-import scala.concurrent.duration._
-
-class AskPatternInstrumentationSpec extends TestKitBase with WordSpecLike with Matchers {
- implicit lazy val system: ActorSystem = ActorSystem("ask-pattern-tracing-spec", ConfigFactory.parseString(
- """
- |kamon {
- | trace {
- | ask-pattern-tracing = on
- | }
- |}
- """.stripMargin))
-
- "the AskPatternTracing" should {
- "log a warning with a stack trace and TraceContext taken from the moment the ask was triggered" in {
- implicit val ec = system.dispatcher
- implicit val timeout = Timeout(10 milliseconds)
- val noReply = system.actorOf(Props[NoReply])
- system.eventStream.subscribe(testActor, classOf[Warning])
-
- val testTraceContext = TraceRecorder.withNewTraceContext("ask-timeout-warning") {
- noReply ? "hello"
- TraceRecorder.currentContext
- }
-
- val warn = expectMsgPF() {
- case warn: Warning if warn.message.toString.contains("Timeout triggered for ask pattern") ⇒ warn
- }
- val capturedCtx = warn.asInstanceOf[TraceContextAware].traceContext
-
- capturedCtx should equal(testTraceContext)
- }
- }
-}
-
-class NoReply extends Actor {
- def receive = {
- case any ⇒
- }
-}
diff --git a/kamon-core/src/test/scala/kamon/metric/ActorMetricsSpec.scala b/kamon-core/src/test/scala/kamon/metric/ActorMetricsSpec.scala
deleted file mode 100644
index 245901cd..00000000
--- a/kamon-core/src/test/scala/kamon/metric/ActorMetricsSpec.scala
+++ /dev/null
@@ -1,223 +0,0 @@
-/* =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metric
-
-import java.nio.LongBuffer
-
-import akka.instrumentation.ActorCellMetrics
-import kamon.Kamon
-import kamon.metric.ActorMetricsTestActor._
-import org.scalatest.{ WordSpecLike, Matchers }
-import akka.testkit.{ ImplicitSender, TestProbe, TestKitBase }
-import akka.actor._
-import com.typesafe.config.ConfigFactory
-import scala.concurrent.duration._
-import kamon.metric.ActorMetrics.{ ActorMetricsRecorder, ActorMetricSnapshot }
-
-class ActorMetricsSpec extends TestKitBase with WordSpecLike with Matchers {
- implicit def self = testActor
- implicit lazy val system: ActorSystem = ActorSystem("actor-metrics-spec", ConfigFactory.parseString(
- """
- |kamon.metrics {
- | tick-interval = 1 hour
- | default-collection-context-buffer-size = 10
- |
- | filters = [
- | {
- | actor {
- | includes = [ "user/tracked-*", "user/measuring-*", "user/clean-after-collect", "user/stop" ]
- | excludes = [ "user/tracked-explicitly-excluded"]
- | }
- | }
- | ]
- | precision.actor {
- | processing-time {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- |
- | time-in-mailbox {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- |
- | mailbox-size {
- | refresh-interval = 1 hour
- | highest-trackable-value = 999999999
- | significant-value-digits = 2
- | }
- | }
- |}
- """.stripMargin))
-
- "the Kamon actor metrics" should {
- "respect the configured include and exclude filters" in new ActorMetricsFixtures {
- val trackedActor = createTestActor("tracked-actor")
- actorMetricsRecorderOf(trackedActor) should not be empty
-
- val nonTrackedActor = createTestActor("non-tracked-actor")
- actorMetricsRecorderOf(nonTrackedActor) shouldBe empty
-
- val trackedButExplicitlyExcluded = createTestActor("tracked-explicitly-excluded")
- actorMetricsRecorderOf(trackedButExplicitlyExcluded) shouldBe empty
- }
-
- "reset all recording instruments after taking a snapshot" in new ActorMetricsFixtures {
- val trackedActor = createTestActor("clean-after-collect")
- val trackedActorMetrics = actorMetricsRecorderOf(trackedActor).get
- for (i ← 1 to 100) {
- trackedActor ! Discard
- }
- trackedActor ! Fail
- trackedActor ! TrackTimings(sleep = Some(1 second))
- expectMsgType[TrackedTimings]
-
- val firstSnapshot = takeSnapshotOf(trackedActorMetrics)
- firstSnapshot.errors.count should be(1L)
- firstSnapshot.mailboxSize.numberOfMeasurements should be > 0L
- firstSnapshot.processingTime.numberOfMeasurements should be(103L) // 102 examples + Initialize message
- firstSnapshot.timeInMailbox.numberOfMeasurements should be(103L) // 102 examples + Initialize message
-
- val secondSnapshot = takeSnapshotOf(trackedActorMetrics) // Ensure that the recorders are clean
- secondSnapshot.errors.count should be(0L)
- secondSnapshot.mailboxSize.numberOfMeasurements should be(3L) // min, max and current
- secondSnapshot.processingTime.numberOfMeasurements should be(0L)
- secondSnapshot.timeInMailbox.numberOfMeasurements should be(0L)
- }
-
- "record the processing-time of the receive function" in new ActorMetricsFixtures {
- val trackedActor = createTestActor("measuring-processing-time")
- val trackedActorMetrics = actorMetricsRecorderOf(trackedActor).get
- takeSnapshotOf(trackedActorMetrics) // Ensure that the recorders are clean
-
- trackedActor ! TrackTimings(sleep = Some(1 second))
- val timings = expectMsgType[TrackedTimings]
- val snapshot = takeSnapshotOf(trackedActorMetrics)
-
- snapshot.processingTime.numberOfMeasurements should be(1L)
- snapshot.processingTime.recordsIterator.next().count should be(1L)
- snapshot.processingTime.recordsIterator.next().level should be(timings.approximateProcessingTime +- 10.millis.toNanos)
- }
-
- "record the number of errors" in new ActorMetricsFixtures {
- val trackedActor = createTestActor("measuring-errors")
- val trackedActorMetrics = actorMetricsRecorderOf(trackedActor).get
- takeSnapshotOf(trackedActorMetrics) // Ensure that the recorders are clean
-
- for (i ← 1 to 10) { trackedActor ! Fail }
- trackedActor ! Ping
- expectMsg(Pong)
- val snapshot = takeSnapshotOf(trackedActorMetrics)
-
- snapshot.errors.count should be(10)
- }
-
- "record the mailbox-size" in new ActorMetricsFixtures {
- val trackedActor = createTestActor("measuring-mailbox-size")
- val trackedActorMetrics = actorMetricsRecorderOf(trackedActor).get
- takeSnapshotOf(trackedActorMetrics) // Ensure that the recorders are clean
-
- trackedActor ! TrackTimings(sleep = Some(1 second))
- for (i ← 1 to 10) {
- trackedActor ! Discard
- }
- trackedActor ! Ping
-
- val timings = expectMsgType[TrackedTimings]
- expectMsg(Pong)
- val snapshot = takeSnapshotOf(trackedActorMetrics)
-
- snapshot.mailboxSize.min should be(0L)
- snapshot.mailboxSize.max should be(11L +- 1L)
- }
-
- "record the time-in-mailbox" in new ActorMetricsFixtures {
- val trackedActor = createTestActor("measuring-time-in-mailbox")
- val trackedActorMetrics = actorMetricsRecorderOf(trackedActor).get
- takeSnapshotOf(trackedActorMetrics) // Ensure that the recorders are clean
-
- trackedActor ! TrackTimings(sleep = Some(1 second))
- val timings = expectMsgType[TrackedTimings]
- val snapshot = takeSnapshotOf(trackedActorMetrics)
-
- snapshot.timeInMailbox.numberOfMeasurements should be(1L)
- snapshot.timeInMailbox.recordsIterator.next().count should be(1L)
- snapshot.timeInMailbox.recordsIterator.next().level should be(timings.approximateTimeInMailbox +- 10.millis.toNanos)
- }
-
- "clean up the associated recorder when the actor is stopped" in new ActorMetricsFixtures {
- val trackedActor = createTestActor("stop")
- actorMetricsRecorderOf(trackedActor).get // force the actor to be initialized
- Kamon(Metrics).storage.get(ActorMetrics("user/stop")) should not be empty
-
- val deathWatcher = TestProbe()
- deathWatcher.watch(trackedActor)
- trackedActor ! PoisonPill
- deathWatcher.expectTerminated(trackedActor)
-
- Kamon(Metrics).storage.get(ActorMetrics("user/stop")) shouldBe empty
- }
- }
-
- trait ActorMetricsFixtures {
- val collectionContext = new CollectionContext {
- val buffer: LongBuffer = LongBuffer.allocate(10000)
- }
-
- def actorMetricsRecorderOf(ref: ActorRef): Option[ActorMetricsRecorder] = {
- val initialisationListener = TestProbe()
- ref.tell(Ping, initialisationListener.ref)
- initialisationListener.expectMsg(Pong)
-
- val underlyingCellField = ref.getClass.getDeclaredMethod("underlying")
- val cell = underlyingCellField.invoke(ref).asInstanceOf[ActorCellMetrics]
-
- cell.actorMetricsRecorder
- }
-
- def createTestActor(name: String): ActorRef = system.actorOf(Props[ActorMetricsTestActor], name)
-
- def takeSnapshotOf(amr: ActorMetricsRecorder): ActorMetricSnapshot = amr.collect(collectionContext)
- }
-}
-
-class ActorMetricsTestActor extends Actor {
- def receive = {
- case Discard ⇒
- case Fail ⇒ throw new ArithmeticException("Division by zero.")
- case Ping ⇒ sender ! Pong
- case TrackTimings(sendTimestamp, sleep) ⇒ {
- val dequeueTimestamp = System.nanoTime()
- sleep.map(s ⇒ Thread.sleep(s.toMillis))
- val afterReceiveTimestamp = System.nanoTime()
-
- sender ! TrackedTimings(sendTimestamp, dequeueTimestamp, afterReceiveTimestamp)
- }
- }
-}
-
-object ActorMetricsTestActor {
- case object Ping
- case object Pong
- case object Fail
- case object Discard
-
- case class TrackTimings(sendTimestamp: Long = System.nanoTime(), sleep: Option[Duration] = None)
- case class TrackedTimings(sendTimestamp: Long, dequeueTimestamp: Long, afterReceiveTimestamp: Long) {
- def approximateTimeInMailbox: Long = dequeueTimestamp - sendTimestamp
- def approximateProcessingTime: Long = afterReceiveTimestamp - dequeueTimestamp
- }
-}
diff --git a/kamon-core/src/test/scala/kamon/metric/DispatcherMetricsSpec.scala b/kamon-core/src/test/scala/kamon/metric/DispatcherMetricsSpec.scala
deleted file mode 100644
index ae324b73..00000000
--- a/kamon-core/src/test/scala/kamon/metric/DispatcherMetricsSpec.scala
+++ /dev/null
@@ -1,108 +0,0 @@
-/* =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metric
-
-import org.scalatest.{ WordSpecLike, Matchers }
-import akka.testkit.{ TestProbe, TestKitBase }
-import akka.actor.{ ActorRef, Props, ActorSystem }
-import com.typesafe.config.ConfigFactory
-import scala.concurrent.duration._
-import kamon.Kamon
-import kamon.metric.Subscriptions.TickMetricSnapshot
-import kamon.metric.DispatcherMetrics.DispatcherMetricSnapshot
-
-class DispatcherMetricsSpec extends TestKitBase with WordSpecLike with Matchers {
- implicit lazy val system: ActorSystem = ActorSystem("dispatcher-metrics-spec", ConfigFactory.parseString(
- """
- |kamon.metrics {
- | tick-interval = 1 second
- | default-collection-context-buffer-size = 10
- |
- | filters = [
- | {
- | dispatcher {
- | includes = ["*"]
- | excludes = ["dispatcher-explicitly-excluded"]
- | }
- | }
- | ]
- |}
- |
- |dispatcher-explicitly-excluded {
- | type = "Dispatcher"
- | executor = "fork-join-executor"
- |}
- |
- |tracked-dispatcher {
- | type = "Dispatcher"
- | executor = "thread-pool-executor"
- |}
- |
- """.stripMargin))
-
- "the Kamon dispatcher metrics" should {
- "respect the configured include and exclude filters" in {
- system.actorOf(Props[ActorMetricsTestActor].withDispatcher("tracked-dispatcher"), "actor-with-tracked-dispatcher")
- system.actorOf(Props[ActorMetricsTestActor].withDispatcher("dispatcher-explicitly-excluded"), "actor-with-excluded-dispatcher")
-
- Kamon(Metrics).subscribe(DispatcherMetrics, "*", testActor, permanently = true)
- expectMsgType[TickMetricSnapshot]
-
- within(2 seconds) {
- val tickSnapshot = expectMsgType[TickMetricSnapshot]
- tickSnapshot.metrics.keys should contain(DispatcherMetrics("tracked-dispatcher"))
- tickSnapshot.metrics.keys should not contain (DispatcherMetrics("dispatcher-explicitly-excluded"))
- }
- }
-
- "record maximumPoolSize, runningThreadCount, queueTaskCount, poolSize metrics" in new DelayableActorFixture {
- val (delayable, metricsListener) = delayableActor("worker-actor", "tracked-dispatcher")
-
- for (_ ← 1 to 100) {
- //delayable ! Discard
- }
-
- val dispatcherMetrics = expectDispatcherMetrics("tracked-dispatcher", metricsListener, 3 seconds)
- dispatcherMetrics.maximumPoolSize.max should be <= 64L //fail in travis
- dispatcherMetrics.poolSize.max should be <= 22L //fail in travis
- dispatcherMetrics.queueTaskCount.max should be(0L)
- dispatcherMetrics.runningThreadCount.max should be(0L)
- }
-
- }
-
- def expectDispatcherMetrics(dispatcherId: String, listener: TestProbe, waitTime: FiniteDuration): DispatcherMetricSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
- }
- val dispatcherMetricsOption = tickSnapshot.metrics.get(DispatcherMetrics(dispatcherId))
- dispatcherMetricsOption should not be empty
- dispatcherMetricsOption.get.asInstanceOf[DispatcherMetricSnapshot]
- }
-
- trait DelayableActorFixture {
- def delayableActor(name: String, dispatcher: String): (ActorRef, TestProbe) = {
- val actor = system.actorOf(Props[ActorMetricsTestActor].withDispatcher(dispatcher), name)
- val metricsListener = TestProbe()
-
- Kamon(Metrics).subscribe(DispatcherMetrics, "*", metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
-
- (actor, metricsListener)
- }
- }
-}
diff --git a/kamon-core/src/test/scala/kamon/metric/RouterMetricsSpec.scala b/kamon-core/src/test/scala/kamon/metric/RouterMetricsSpec.scala
deleted file mode 100644
index 2d56f1bb..00000000
--- a/kamon-core/src/test/scala/kamon/metric/RouterMetricsSpec.scala
+++ /dev/null
@@ -1,164 +0,0 @@
-/* =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metric
-
-import java.nio.LongBuffer
-
-import akka.actor._
-import akka.routing.RoundRobinRouter
-import akka.testkit.{ TestProbe, ImplicitSender, TestKitBase }
-import com.typesafe.config.ConfigFactory
-import kamon.Kamon
-import kamon.metric.RouterMetrics._
-import kamon.metric.RouterMetricsTestActor._
-import kamon.metric.Subscriptions.TickMetricSnapshot
-import kamon.metric.instrument.{ Counter, Histogram }
-import org.scalatest.{ Matchers, WordSpecLike }
-
-import scala.concurrent.duration._
-
-class RouterMetricsSpec extends TestKitBase with WordSpecLike with Matchers {
- implicit def self = testActor
-
- implicit lazy val system: ActorSystem = ActorSystem("router-metrics-spec", ConfigFactory.parseString(
- """
- |kamon.metrics {
- | tick-interval = 1 second
- | default-collection-context-buffer-size = 10
- |
- | filters = [
- | {
- | router {
- | includes = [ "user/tracked-*", "user/measuring-*", "user/stop" ]
- | excludes = [ "user/tracked-explicitly-excluded"]
- | }
- | }
- | ]
- | precision {
- | default-histogram-precision {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- | }
- |}
- """.stripMargin))
-
- "the Kamon router metrics" should {
- "respect the configured include and exclude filters" in new RouterMetricsFixtures {
- createTestRouter("tracked-router")
- createTestRouter("non-tracked-router")
- createTestRouter("tracked-explicitly-excluded")
-
- Kamon(Metrics).subscribe(RouterMetrics, "*", testActor, permanently = true)
- expectMsgType[TickMetricSnapshot]
-
- within(2 seconds) {
- val tickSnapshot = expectMsgType[TickMetricSnapshot]
- tickSnapshot.metrics.keys should contain(RouterMetrics("user/tracked-router"))
- tickSnapshot.metrics.keys should not contain (RouterMetrics("user/non-tracked-router"))
- tickSnapshot.metrics.keys should not contain (RouterMetrics("user/tracked-explicitly-excluded"))
- }
- }
-
- "record the processing-time of the receive function" in new RouterMetricsFixtures {
- val metricsListener = TestProbe()
- val trackedRouter = createTestRouter("measuring-processing-time")
-
- trackedRouter.tell(RouterTrackTimings(sleep = Some(1 second)), metricsListener.ref)
- val timings = metricsListener.expectMsgType[RouterTrackedTimings]
-
- val tickSnapshot = expectMsgType[TickMetricSnapshot].metrics
- tickSnapshot(RouterMetrics("user/measuring-processing-time")).metrics(ProcessingTime).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(1L)
- tickSnapshot(RouterMetrics("user/measuring-processing-time")).metrics(ProcessingTime).asInstanceOf[Histogram.Snapshot].recordsIterator.next().count should be(1L)
- // tickSnapshot(RouterMetrics("user/measuring-processing-time")).metrics(ProcessingTime).asInstanceOf[Histogram.Snapshot].recordsIterator.next().level should be(timings.approximateProcessingTime +- 10.millis.toNanos)
- }
-
- "record the number of errors" in new RouterMetricsFixtures {
- val metricsListener = TestProbe()
- val trackedRouter = createTestRouter("measuring-errors")
-
- for (i ← 1 to 10) {
- trackedRouter.tell(Fail, metricsListener.ref)
- }
- val tickSnapshot = expectMsgType[TickMetricSnapshot].metrics
- tickSnapshot(RouterMetrics("user/measuring-errors")).metrics(Errors).asInstanceOf[Counter.Snapshot].count should be(10L)
- }
-
- "record the time-in-mailbox" in new RouterMetricsFixtures {
- val metricsListener = TestProbe()
- val trackedRouter = createTestRouter("measuring-time-in-mailbox")
-
- trackedRouter.tell(RouterTrackTimings(sleep = Some(1 second)), metricsListener.ref)
- val timings = metricsListener.expectMsgType[RouterTrackedTimings]
- val tickSnapshot = expectMsgType[TickMetricSnapshot].metrics
-
- tickSnapshot(RouterMetrics("user/measuring-time-in-mailbox")).metrics(TimeInMailbox).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(1L)
- tickSnapshot(RouterMetrics("user/measuring-time-in-mailbox")).metrics(TimeInMailbox).asInstanceOf[Histogram.Snapshot].recordsIterator.next().count should be(1L)
- tickSnapshot(RouterMetrics("user/measuring-time-in-mailbox")).metrics(TimeInMailbox).asInstanceOf[Histogram.Snapshot].recordsIterator.next().level should be(timings.approximateTimeInMailbox +- 10.millis.toNanos)
- }
-
- "clean up the associated recorder when the actor is stopped" in new RouterMetricsFixtures {
- val trackedRouter = createTestRouter("stop")
- trackedRouter ! Ping
- Kamon(Metrics).storage.toString() // force to be initialized
- Kamon(Metrics).storage.get(RouterMetrics("user/stop")) should not be empty
-
- val deathWatcher = TestProbe()
- deathWatcher.watch(trackedRouter)
- trackedRouter ! PoisonPill
- deathWatcher.expectTerminated(trackedRouter)
-
- Kamon(Metrics).storage.get(RouterMetrics("user/stop")) shouldBe empty
- }
- }
-
- trait RouterMetricsFixtures {
- val collectionContext = new CollectionContext {
- val buffer: LongBuffer = LongBuffer.allocate(10000)
- }
-
- def createTestRouter(name: String): ActorRef = system.actorOf(Props[RouterMetricsTestActor]
- .withRouter(RoundRobinRouter(nrOfInstances = 5)), name)
- }
-}
-
-class RouterMetricsTestActor extends Actor {
- def receive = {
- case Discard ⇒
- case Fail ⇒ throw new ArithmeticException("Division by zero.")
- case Ping ⇒ sender ! Pong
- case RouterTrackTimings(sendTimestamp, sleep) ⇒ {
- val dequeueTimestamp = System.nanoTime()
- sleep.map(s ⇒ Thread.sleep(s.toMillis))
- val afterReceiveTimestamp = System.nanoTime()
-
- sender ! RouterTrackedTimings(sendTimestamp, dequeueTimestamp, afterReceiveTimestamp)
- }
- }
-}
-
-object RouterMetricsTestActor {
- case object Ping
- case object Pong
- case object Fail
- case object Discard
-
- case class RouterTrackTimings(sendTimestamp: Long = System.nanoTime(), sleep: Option[Duration] = None)
- case class RouterTrackedTimings(sendTimestamp: Long, dequeueTimestamp: Long, afterReceiveTimestamp: Long) {
- def approximateTimeInMailbox: Long = dequeueTimestamp - sendTimestamp
- def approximateProcessingTime: Long = afterReceiveTimestamp - dequeueTimestamp
- }
-}
diff --git a/kamon-core/src/test/scala/kamon/metric/SubscriptionsProtocolSpec.scala b/kamon-core/src/test/scala/kamon/metric/SubscriptionsProtocolSpec.scala
index 60923a2b..370a49f9 100644
--- a/kamon-core/src/test/scala/kamon/metric/SubscriptionsProtocolSpec.scala
+++ b/kamon-core/src/test/scala/kamon/metric/SubscriptionsProtocolSpec.scala
@@ -1,129 +1,127 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
package kamon.metric
import akka.actor._
-import akka.testkit.{ TestProbe, ImplicitSender, TestKitBase }
+import akka.testkit.{ TestProbe, ImplicitSender }
import com.typesafe.config.ConfigFactory
import kamon.Kamon
-import kamon.metric.Subscriptions.TickMetricSnapshot
-import org.scalatest.{ Matchers, WordSpecLike }
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
+import kamon.testkit.BaseKamonSpec
import scala.concurrent.duration._
-class SubscriptionsProtocolSpec extends TestKitBase with WordSpecLike with Matchers {
- implicit def self = testActor
- implicit lazy val system: ActorSystem = ActorSystem("subscriptions-protocol-spec", ConfigFactory.parseString(
- """
- |kamon.metrics {
- | tick-interval = 1 hour
- |}
- """.stripMargin))
+class SubscriptionsProtocolSpec extends BaseKamonSpec("subscriptions-protocol-spec") {
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon.metric {
+ | tick-interval = 1 hour
+ |}
+ """.stripMargin)
- val metricsExtension = Kamon(Metrics)(system)
- import metricsExtension.{ register, subscribe, unsubscribe }
+ lazy val metricsModule = Kamon.metrics
+ import metricsModule.{ register, subscribe, unsubscribe }
"the Subscriptions messaging protocol" should {
"allow subscribing for a single tick" in {
val subscriber = TestProbe()
- register(TraceMetrics("one-shot"), TraceMetrics.Factory)
- subscribe(TraceMetrics, "one-shot", subscriber.ref, permanently = false)
+ register(TraceMetrics, "one-shot")
+ subscribe("trace", "one-shot", subscriber.ref, permanently = false)
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ flushSubscriptions()
val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
tickSnapshot.metrics.size should be(1)
- tickSnapshot.metrics.keys should contain(TraceMetrics("one-shot"))
+ tickSnapshot.metrics.keys should contain(Entity("one-shot", "trace"))
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ flushSubscriptions()
subscriber.expectNoMsg(1 second)
}
"allow subscribing permanently to a metric" in {
val subscriber = TestProbe()
- register(TraceMetrics("permanent"), TraceMetrics.Factory)
- subscribe(TraceMetrics, "permanent", subscriber.ref, permanently = true)
+ register(TraceMetrics, "permanent")
+ subscribe("trace", "permanent", subscriber.ref, permanently = true)
for (repetition ← 1 to 5) {
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ flushSubscriptions()
val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
tickSnapshot.metrics.size should be(1)
- tickSnapshot.metrics.keys should contain(TraceMetrics("permanent"))
- subscriber.expectNoMsg(1 second)
+ tickSnapshot.metrics.keys should contain(Entity("permanent", "trace"))
}
}
"allow subscribing to metrics matching a glob pattern" in {
val subscriber = TestProbe()
- register(TraceMetrics("include-one"), TraceMetrics.Factory)
- register(TraceMetrics("exclude-two"), TraceMetrics.Factory)
- register(TraceMetrics("include-three"), TraceMetrics.Factory)
- subscribe(TraceMetrics, "include-*", subscriber.ref, permanently = true)
+ register(TraceMetrics, "include-one")
+ register(TraceMetrics, "exclude-two")
+ register(TraceMetrics, "include-three")
+ subscribe("trace", "include-*", subscriber.ref, permanently = true)
for (repetition ← 1 to 5) {
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ flushSubscriptions()
val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
tickSnapshot.metrics.size should be(2)
- tickSnapshot.metrics.keys should contain(TraceMetrics("include-one"))
- tickSnapshot.metrics.keys should contain(TraceMetrics("include-three"))
- subscriber.expectNoMsg(1 second)
+ tickSnapshot.metrics.keys should contain(Entity("include-one", "trace"))
+ tickSnapshot.metrics.keys should contain(Entity("include-three", "trace"))
}
}
"send a single TickMetricSnapshot to each subscriber, even if subscribed multiple times" in {
val subscriber = TestProbe()
- register(TraceMetrics("include-one"), TraceMetrics.Factory)
- register(TraceMetrics("exclude-two"), TraceMetrics.Factory)
- register(TraceMetrics("include-three"), TraceMetrics.Factory)
- subscribe(TraceMetrics, "include-one", subscriber.ref, permanently = true)
- subscribe(TraceMetrics, "include-three", subscriber.ref, permanently = true)
+ register(TraceMetrics, "include-one")
+ register(TraceMetrics, "exclude-two")
+ register(TraceMetrics, "include-three")
+ subscribe("trace", "include-one", subscriber.ref, permanently = true)
+ subscribe("trace", "include-three", subscriber.ref, permanently = true)
for (repetition ← 1 to 5) {
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ flushSubscriptions()
val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
tickSnapshot.metrics.size should be(2)
- tickSnapshot.metrics.keys should contain(TraceMetrics("include-one"))
- tickSnapshot.metrics.keys should contain(TraceMetrics("include-three"))
+ tickSnapshot.metrics.keys should contain(Entity("include-one", "trace"))
+ tickSnapshot.metrics.keys should contain(Entity("include-three", "trace"))
}
}
"allow un-subscribing a subscriber" in {
val subscriber = TestProbe()
- register(TraceMetrics("one-shot"), TraceMetrics.Factory)
- subscribe(TraceMetrics, "one-shot", subscriber.ref, permanently = true)
+ register(TraceMetrics, "one-shot")
+ subscribe("trace", "one-shot", subscriber.ref, permanently = true)
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ flushSubscriptions()
val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
tickSnapshot.metrics.size should be(1)
- tickSnapshot.metrics.keys should contain(TraceMetrics("one-shot"))
+ tickSnapshot.metrics.keys should contain(Entity("one-shot", "trace"))
unsubscribe(subscriber.ref)
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ flushSubscriptions()
subscriber.expectNoMsg(1 second)
}
+ }
- "watch all subscribers and un-subscribe them if they die" in {
- val subscriber = TestProbe()
- val forwarderSubscriber = system.actorOf(Props(new ForwarderSubscriber(subscriber.ref)))
- watch(forwarderSubscriber)
- register(TraceMetrics("one-shot"), TraceMetrics.Factory)
- subscribe(TraceMetrics, "one-shot", forwarderSubscriber, permanently = true)
-
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
- val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
- tickSnapshot.metrics.size should be(1)
- tickSnapshot.metrics.keys should contain(TraceMetrics("one-shot"))
-
- forwarderSubscriber ! PoisonPill
- expectTerminated(forwarderSubscriber)
-
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
- subscriber.expectNoMsg(2 seconds)
- }
+ def subscriptionsActor: ActorRef = {
+ val listener = TestProbe()
+ system.actorSelection("/user/kamon/kamon-metrics").tell(Identify(1), listener.ref)
+ listener.expectMsgType[ActorIdentity].ref.get
}
}
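
For reference, a hedged sketch of the subscription calls the updated spec exercises (object and method wrapper names here are illustrative): entities are now addressed by a (category, name pattern) pair instead of by metric-group case classes.

import akka.actor.ActorRef
import kamon.Kamon

object SubscriptionSketch {
  def subscribeToTraces(subscriber: ActorRef): Unit = {
    // one-shot subscription to a single trace entity
    Kamon.metrics.subscribe("trace", "one-shot", subscriber, permanently = false)
    // permanent subscription to every trace entity matching the glob
    Kamon.metrics.subscribe("trace", "include-*", subscriber, permanently = true)
  }
}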
diff --git a/kamon-core/src/test/scala/kamon/metric/TickMetricSnapshotBufferSpec.scala b/kamon-core/src/test/scala/kamon/metric/TickMetricSnapshotBufferSpec.scala
index 27eb64e2..76f25f10 100644
--- a/kamon-core/src/test/scala/kamon/metric/TickMetricSnapshotBufferSpec.scala
+++ b/kamon-core/src/test/scala/kamon/metric/TickMetricSnapshotBufferSpec.scala
@@ -18,32 +18,29 @@ package kamon.metric
import com.typesafe.config.ConfigFactory
import kamon.Kamon
-import kamon.metric.instrument.Histogram
import kamon.metric.instrument.Histogram.MutableRecord
-import org.scalatest.{ Matchers, WordSpecLike }
-import akka.testkit.{ ImplicitSender, TestKitBase }
-import akka.actor.ActorSystem
+import kamon.testkit.BaseKamonSpec
+import kamon.util.MilliTimestamp
+import akka.testkit.ImplicitSender
import scala.concurrent.duration._
-import kamon.metric.Subscriptions.TickMetricSnapshot
-
-class TickMetricSnapshotBufferSpec extends TestKitBase with WordSpecLike with Matchers {
- implicit def self = testActor
- implicit lazy val system: ActorSystem = ActorSystem("trace-metrics-spec", ConfigFactory.parseString(
- """
- |kamon.metrics {
- | tick-interval = 1 hour
- | default-collection-context-buffer-size = 10
- |
- | filters = [
- | {
- | trace {
- | includes = [ "*" ]
- | excludes = [ "non-tracked-trace"]
- | }
- | }
- | ]
- |}
- """.stripMargin))
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
+
+class TickMetricSnapshotBufferSpec extends BaseKamonSpec("trace-metrics-spec") {
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon.metric {
+ | tick-interval = 1 hour
+ | default-collection-context-buffer-size = 10
+ |
+ | filters {
+ | trace {
+ | includes = [ "*" ]
+ | excludes = [ "non-tracked-trace" ]
+ | }
+ | }
+ |}
+ """.stripMargin)
"the TickMetricSnapshotBuffer" should {
"merge TickMetricSnapshots received until the flush timeout is reached and fix the from/to fields" in new SnapshotFixtures {
@@ -56,8 +53,8 @@ class TickMetricSnapshotBufferSpec extends TestKitBase with WordSpecLike with Ma
within(2 seconds)(expectNoMsg())
val mergedSnapshot = expectMsgType[TickMetricSnapshot]
- mergedSnapshot.from should equal(1000)
- mergedSnapshot.to should equal(4000)
+ mergedSnapshot.from.millis should equal(1000)
+ mergedSnapshot.to.millis should equal(4000)
mergedSnapshot.metrics should be('empty)
}
@@ -71,11 +68,11 @@ class TickMetricSnapshotBufferSpec extends TestKitBase with WordSpecLike with Ma
within(2 seconds)(expectNoMsg())
val mergedSnapshot = expectMsgType[TickMetricSnapshot]
- mergedSnapshot.from should equal(1000)
- mergedSnapshot.to should equal(4000)
+ mergedSnapshot.from.millis should equal(1000)
+ mergedSnapshot.to.millis should equal(4000)
mergedSnapshot.metrics should not be ('empty)
- val testMetricSnapshot = mergedSnapshot.metrics(testTraceIdentity).metrics(TraceMetrics.ElapsedTime).asInstanceOf[Histogram.Snapshot]
+ val testMetricSnapshot = mergedSnapshot.metrics(testTraceIdentity).histogram("elapsed-time").get
testMetricSnapshot.min should equal(10)
testMetricSnapshot.max should equal(300)
testMetricSnapshot.numberOfMeasurements should equal(6)
@@ -89,24 +86,24 @@ class TickMetricSnapshotBufferSpec extends TestKitBase with WordSpecLike with Ma
}
trait SnapshotFixtures {
- val collectionContext = Kamon(Metrics).buildDefaultCollectionContext
- val testTraceIdentity = TraceMetrics("buffer-spec-test-trace")
- val traceRecorder = Kamon(Metrics).register(testTraceIdentity, TraceMetrics.Factory).get
-
- val firstEmpty = TickMetricSnapshot(1000, 2000, Map.empty)
- val secondEmpty = TickMetricSnapshot(2000, 3000, Map.empty)
- val thirdEmpty = TickMetricSnapshot(3000, 4000, Map.empty)
-
- traceRecorder.elapsedTime.record(10L)
- traceRecorder.elapsedTime.record(20L)
- traceRecorder.elapsedTime.record(30L)
- val firstNonEmpty = TickMetricSnapshot(1000, 2000, Map(
+ val collectionContext = Kamon.metrics.buildDefaultCollectionContext
+ val testTraceIdentity = Entity("buffer-spec-test-trace", "trace")
+ val traceRecorder = Kamon.metrics.register(TraceMetrics, "buffer-spec-test-trace").get.recorder
+
+ val firstEmpty = TickMetricSnapshot(new MilliTimestamp(1000), new MilliTimestamp(2000), Map.empty)
+ val secondEmpty = TickMetricSnapshot(new MilliTimestamp(2000), new MilliTimestamp(3000), Map.empty)
+ val thirdEmpty = TickMetricSnapshot(new MilliTimestamp(3000), new MilliTimestamp(4000), Map.empty)
+
+ traceRecorder.ElapsedTime.record(10L)
+ traceRecorder.ElapsedTime.record(20L)
+ traceRecorder.ElapsedTime.record(30L)
+ val firstNonEmpty = TickMetricSnapshot(new MilliTimestamp(1000), new MilliTimestamp(2000), Map(
(testTraceIdentity -> traceRecorder.collect(collectionContext))))
- traceRecorder.elapsedTime.record(10L)
- traceRecorder.elapsedTime.record(10L)
- traceRecorder.elapsedTime.record(300L)
- val secondNonEmpty = TickMetricSnapshot(1000, 2000, Map(
+ traceRecorder.ElapsedTime.record(10L)
+ traceRecorder.ElapsedTime.record(10L)
+ traceRecorder.ElapsedTime.record(300L)
+ val secondNonEmpty = TickMetricSnapshot(new MilliTimestamp(1000), new MilliTimestamp(2000), Map(
(testTraceIdentity -> traceRecorder.collect(collectionContext))))
}
}
diff --git a/kamon-core/src/test/scala/kamon/metric/TraceMetricsSpec.scala b/kamon-core/src/test/scala/kamon/metric/TraceMetricsSpec.scala
index 301ea9b2..3a9c8ca3 100644
--- a/kamon-core/src/test/scala/kamon/metric/TraceMetricsSpec.scala
+++ b/kamon-core/src/test/scala/kamon/metric/TraceMetricsSpec.scala
@@ -1,93 +1,98 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
package kamon.metric
-import akka.actor.ActorSystem
-import akka.testkit.{ ImplicitSender, TestKitBase }
import com.typesafe.config.ConfigFactory
-import kamon.Kamon
-import kamon.metric.TraceMetrics.TraceMetricsSnapshot
-import kamon.trace.{ SegmentMetricIdentity, TraceRecorder }
-import org.scalatest.{ Matchers, WordSpecLike }
+import kamon.testkit.BaseKamonSpec
+import kamon.trace.TraceContext
+import kamon.metric.instrument.Histogram
+
+class TraceMetricsSpec extends BaseKamonSpec("trace-metrics-spec") {
+ import TraceMetricsSpec.SegmentSyntax
-class TraceMetricsSpec extends TestKitBase with WordSpecLike with Matchers {
- implicit def self = testActor
- implicit lazy val system: ActorSystem = ActorSystem("trace-metrics-spec", ConfigFactory.parseString(
- """
- |kamon.metrics {
- | tick-interval = 1 hour
- | default-collection-context-buffer-size = 10
- |
- | filters = [
- | {
- | trace {
- | includes = [ "*" ]
- | excludes = [ "non-tracked-trace"]
- | }
- | }
- | ]
- | precision {
- | default-histogram-precision {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- |
- | default-min-max-counter-precision {
- | refresh-interval = 1 second
- | highest-trackable-value = 999999999
- | significant-value-digits = 2
- | }
- | }
- |}
- """.stripMargin))
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon.metric {
+ | tick-interval = 1 hour
+ | default-collection-context-buffer-size = 10
+ |
+ | filters {
+ | trace {
+ | includes = [ "*" ]
+ | excludes = [ "non-tracked-trace"]
+ | }
+ | }
+ |}
+ """.stripMargin)
"the TraceMetrics" should {
"record the elapsed time between a trace creation and finish" in {
for (repetitions ← 1 to 10) {
- TraceRecorder.withNewTraceContext("record-elapsed-time") {
- TraceRecorder.finish()
+ TraceContext.withContext(newContext("record-elapsed-time")) {
+ TraceContext.currentContext.finish()
}
}
- val snapshot = takeSnapshotOf("record-elapsed-time")
- snapshot.elapsedTime.numberOfMeasurements should be(10)
- snapshot.segments shouldBe empty
+ val snapshot = takeSnapshotOf("record-elapsed-time", "trace")
+ snapshot.histogram("elapsed-time").get.numberOfMeasurements should be(10)
}
"record the elapsed time for segments that occur inside a given trace" in {
- TraceRecorder.withNewTraceContext("trace-with-segments") {
- val segment = TraceRecorder.currentContext.startSegment("test-segment", "test-category", "test-library")
+ TraceContext.withContext(newContext("trace-with-segments")) {
+ val segment = TraceContext.currentContext.startSegment("test-segment", "test-category", "test-library")
segment.finish()
- TraceRecorder.finish()
+ TraceContext.currentContext.finish()
}
- val snapshot = takeSnapshotOf("trace-with-segments")
- snapshot.elapsedTime.numberOfMeasurements should be(1)
+ val snapshot = takeSnapshotOf("trace-with-segments", "trace")
+ snapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
snapshot.segments.size should be(1)
- snapshot.segments(SegmentMetricIdentity("test-segment", "test-category", "test-library")).numberOfMeasurements should be(1)
+ snapshot.segment("test-segment", "test-category", "test-library").numberOfMeasurements should be(1)
}
"record the elapsed time for segments that finish after their correspondent trace has finished" in {
- val segment = TraceRecorder.withNewTraceContext("closing-segment-after-trace") {
- val s = TraceRecorder.currentContext.startSegment("test-segment", "test-category", "test-library")
- TraceRecorder.finish()
+ val segment = TraceContext.withContext(newContext("closing-segment-after-trace")) {
+ val s = TraceContext.currentContext.startSegment("test-segment", "test-category", "test-library")
+ TraceContext.currentContext.finish()
s
}
- val beforeFinishSegmentSnapshot = takeSnapshotOf("closing-segment-after-trace")
- beforeFinishSegmentSnapshot.elapsedTime.numberOfMeasurements should be(1)
+ val beforeFinishSegmentSnapshot = takeSnapshotOf("closing-segment-after-trace", "trace")
+ beforeFinishSegmentSnapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
beforeFinishSegmentSnapshot.segments.size should be(0)
segment.finish()
- val afterFinishSegmentSnapshot = takeSnapshotOf("closing-segment-after-trace")
- afterFinishSegmentSnapshot.elapsedTime.numberOfMeasurements should be(0)
+ val afterFinishSegmentSnapshot = takeSnapshotOf("closing-segment-after-trace", "trace")
+ afterFinishSegmentSnapshot.histogram("elapsed-time").get.numberOfMeasurements should be(0)
afterFinishSegmentSnapshot.segments.size should be(1)
- afterFinishSegmentSnapshot.segments(SegmentMetricIdentity("test-segment", "test-category", "test-library")).numberOfMeasurements should be(1)
+ afterFinishSegmentSnapshot.segment("test-segment", "test-category", "test-library").numberOfMeasurements should be(1)
}
}
+}
+
+object TraceMetricsSpec {
+ implicit class SegmentSyntax(val entitySnapshot: EntitySnapshot) extends AnyVal {
+ def segments: Map[HistogramKey, Histogram.Snapshot] = {
+ entitySnapshot.histograms.filterKeys(_.metadata.contains("category"))
+ }
- def takeSnapshotOf(traceName: String): TraceMetricsSnapshot = {
- val recorder = Kamon(Metrics).register(TraceMetrics(traceName), TraceMetrics.Factory)
- val collectionContext = Kamon(Metrics).buildDefaultCollectionContext
- recorder.get.collect(collectionContext)
+ def segment(name: String, category: String, library: String): Histogram.Snapshot =
+ segments(TraceMetrics.segmentKey(name, category, library))
}
}
diff --git a/kamon-core/src/test/scala/kamon/metric/UserMetricsSpec.scala b/kamon-core/src/test/scala/kamon/metric/UserMetricsSpec.scala
index e072d3ef..455518f8 100644
--- a/kamon-core/src/test/scala/kamon/metric/UserMetricsSpec.scala
+++ b/kamon-core/src/test/scala/kamon/metric/UserMetricsSpec.scala
@@ -1,311 +1,127 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
package kamon.metric
-import akka.actor.{ Props, ActorSystem }
-import akka.testkit.{ ImplicitSender, TestKitBase }
import com.typesafe.config.ConfigFactory
import kamon.Kamon
-import kamon.metric.Subscriptions.TickMetricSnapshot
-import kamon.metric.UserMetrics._
-import kamon.metric.instrument.{ Histogram, Counter, MinMaxCounter, Gauge }
-import kamon.metric.instrument.Histogram.MutableRecord
-import org.scalatest.{ Matchers, WordSpecLike }
+import kamon.metric.instrument.Histogram.DynamicRange
+import kamon.testkit.BaseKamonSpec
import scala.concurrent.duration._
-class UserMetricsSpec extends TestKitBase with WordSpecLike with Matchers {
- implicit def self = testActor
- implicit lazy val system: ActorSystem = ActorSystem("actor-metrics-spec", ConfigFactory.parseString(
- """
- |kamon.metrics {
- | tick-interval = 1 hour
- | default-collection-context-buffer-size = 10
- |
- | precision {
- | default-histogram-precision {
- | highest-trackable-value = 10000
- | significant-value-digits = 2
- | }
- |
- | default-min-max-counter-precision {
- | refresh-interval = 1 hour
- | highest-trackable-value = 1000
- | significant-value-digits = 2
- | }
- |
- | default-gauge-precision {
- | refresh-interval = 1 hour
- | highest-trackable-value = 999999999
- | significant-value-digits = 2
- | }
- | }
- |}
- """.stripMargin))
+class UserMetricsSpec extends BaseKamonSpec("user-metrics-spec") {
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon.metric {
+ | tick-interval = 1 hour
+ | default-collection-context-buffer-size = 10
+ |}
+ """.stripMargin)
"the UserMetrics extension" should {
+
"allow registering a fully configured Histogram and get the same Histogram if registering again" in {
- val histogramA = Kamon(UserMetrics).registerHistogram("histogram-with-settings", Histogram.Precision.Normal, 10000L)
- val histogramB = Kamon(UserMetrics).registerHistogram("histogram-with-settings", Histogram.Precision.Normal, 10000L)
+ val histogramA = Kamon.userMetrics.histogram("histogram-with-settings", DynamicRange(1, 10000, 2))
+ val histogramB = Kamon.userMetrics.histogram("histogram-with-settings", DynamicRange(1, 10000, 2))
histogramA shouldBe theSameInstanceAs(histogramB)
}
"return the original Histogram when registering a fully configured Histogram for second time but with different settings" in {
- val histogramA = Kamon(UserMetrics).registerHistogram("histogram-with-settings", Histogram.Precision.Normal, 10000L)
- val histogramB = Kamon(UserMetrics).registerHistogram("histogram-with-settings", Histogram.Precision.Fine, 50000L)
+ val histogramA = Kamon.userMetrics.histogram("histogram-with-settings", DynamicRange(1, 10000, 2))
+ val histogramB = Kamon.userMetrics.histogram("histogram-with-settings", DynamicRange(1, 50000, 2))
histogramA shouldBe theSameInstanceAs(histogramB)
}
"allow registering a Histogram that takes the default configuration from the kamon.metrics.precision settings" in {
- Kamon(UserMetrics).registerHistogram("histogram-with-default-configuration")
+ Kamon.userMetrics.histogram("histogram-with-default-configuration")
}
"allow registering a Counter and get the same Counter if registering again" in {
- val counterA = Kamon(UserMetrics).registerCounter("counter")
- val counterB = Kamon(UserMetrics).registerCounter("counter")
+ val counterA = Kamon.userMetrics.counter("counter")
+ val counterB = Kamon.userMetrics.counter("counter")
counterA shouldBe theSameInstanceAs(counterB)
}
"allow registering a fully configured MinMaxCounter and get the same MinMaxCounter if registering again" in {
- val minMaxCounterA = Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-with-settings", Histogram.Precision.Normal, 1000L, 1 second)
- val minMaxCounterB = Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-with-settings", Histogram.Precision.Normal, 1000L, 1 second)
+ val minMaxCounterA = Kamon.userMetrics.minMaxCounter("min-max-counter-with-settings", DynamicRange(1, 10000, 2), 1 second)
+ val minMaxCounterB = Kamon.userMetrics.minMaxCounter("min-max-counter-with-settings", DynamicRange(1, 10000, 2), 1 second)
minMaxCounterA shouldBe theSameInstanceAs(minMaxCounterB)
}
"return the original MinMaxCounter when registering a fully configured MinMaxCounter for second time but with different settings" in {
- val minMaxCounterA = Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-with-settings", Histogram.Precision.Normal, 1000L, 1 second)
- val minMaxCounterB = Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-with-settings", Histogram.Precision.Fine, 5000L, 1 second)
+ val minMaxCounterA = Kamon.userMetrics.minMaxCounter("min-max-counter-with-settings", DynamicRange(1, 10000, 2), 1 second)
+ val minMaxCounterB = Kamon.userMetrics.minMaxCounter("min-max-counter-with-settings", DynamicRange(1, 50000, 2), 1 second)
minMaxCounterA shouldBe theSameInstanceAs(minMaxCounterB)
}
"allow registering a MinMaxCounter that takes the default configuration from the kamon.metrics.precision settings" in {
- Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-with-default-configuration")
+ Kamon.userMetrics.minMaxCounter("min-max-counter-with-default-configuration")
}
"allow registering a fully configured Gauge and get the same Gauge if registering again" in {
- val gaugeA = Kamon(UserMetrics).registerGauge("gauge-with-settings", Histogram.Precision.Normal, 1000L, 1 second) {
+ val gaugeA = Kamon.userMetrics.gauge("gauge-with-settings", DynamicRange(1, 10000, 2), 1 second, {
() ⇒ 1L
- }
+ })
- val gaugeB = Kamon(UserMetrics).registerGauge("gauge-with-settings", Histogram.Precision.Normal, 1000L, 1 second) {
+ val gaugeB = Kamon.userMetrics.gauge("gauge-with-settings", DynamicRange(1, 10000, 2), 1 second, {
() ⇒ 1L
- }
+ })
gaugeA shouldBe theSameInstanceAs(gaugeB)
}
"return the original Gauge when registering a fully configured Gauge for second time but with different settings" in {
- val gaugeA = Kamon(UserMetrics).registerGauge("gauge-with-settings", Histogram.Precision.Normal, 1000L, 1 second) {
+ val gaugeA = Kamon.userMetrics.gauge("gauge-with-settings", DynamicRange(1, 10000, 2), 1 second, {
() ⇒ 1L
- }
+ })
- val gaugeB = Kamon(UserMetrics).registerGauge("gauge-with-settings", Histogram.Precision.Fine, 5000L, 1 second) {
+ val gaugeB = Kamon.userMetrics.gauge("gauge-with-settings", DynamicRange(1, 10000, 2), 1 second, {
() ⇒ 1L
- }
+ })
gaugeA shouldBe theSameInstanceAs(gaugeB)
}
"allow registering a Gauge that takes the default configuration from the kamon.metrics.precision settings" in {
- Kamon(UserMetrics).registerGauge("gauge-with-default-configuration") {
+ Kamon.userMetrics.gauge("gauge-with-default-configuration", {
() ⇒ 2L
- }
+ })
}
"allow un-registering user metrics" in {
- val metricsExtension = Kamon(Metrics)
- Kamon(UserMetrics).registerCounter("counter-for-remove")
- Kamon(UserMetrics).registerHistogram("histogram-for-remove")
- Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-for-remove")
- Kamon(UserMetrics).registerGauge("gauge-for-remove") { () ⇒ 2L }
-
- metricsExtension.storage.keys should contain(UserCounter("counter-for-remove"))
- metricsExtension.storage.keys should contain(UserHistogram("histogram-for-remove"))
- metricsExtension.storage.keys should contain(UserMinMaxCounter("min-max-counter-for-remove"))
- metricsExtension.storage.keys should contain(UserGauge("gauge-for-remove"))
-
- Kamon(UserMetrics).removeCounter("counter-for-remove")
- Kamon(UserMetrics).removeHistogram("histogram-for-remove")
- Kamon(UserMetrics).removeMinMaxCounter("min-max-counter-for-remove")
- Kamon(UserMetrics).removeGauge("gauge-for-remove")
-
- metricsExtension.storage.keys should not contain (UserCounter("counter-for-remove"))
- metricsExtension.storage.keys should not contain (UserHistogram("histogram-for-remove"))
- metricsExtension.storage.keys should not contain (UserMinMaxCounter("min-max-counter-for-remove"))
- metricsExtension.storage.keys should not contain (UserGauge("gauge-for-remove"))
- }
-
- "include all the registered metrics in the a tick snapshot and reset all recorders" in {
- Kamon(Metrics).subscribe(UserHistograms, "*", testActor, permanently = true)
- Kamon(Metrics).subscribe(UserCounters, "*", testActor, permanently = true)
- Kamon(Metrics).subscribe(UserMinMaxCounters, "*", testActor, permanently = true)
- Kamon(Metrics).subscribe(UserGauges, "*", testActor, permanently = true)
-
- val histogramWithSettings = Kamon(UserMetrics).registerHistogram("histogram-with-settings", Histogram.Precision.Normal, 10000L)
- val histogramWithDefaultConfiguration = Kamon(UserMetrics).registerHistogram("histogram-with-default-configuration")
- val counter = Kamon(UserMetrics).registerCounter("counter")
- val minMaxCounterWithSettings = Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-with-settings", Histogram.Precision.Normal, 1000L, 1 second)
- val gauge = Kamon(UserMetrics).registerGauge("gauge-with-default-configuration") { () ⇒ 2L }
-
- // lets put some values on those metrics
- histogramWithSettings.record(10)
- histogramWithSettings.record(20, 100)
- histogramWithDefaultConfiguration.record(40)
-
- counter.increment()
- counter.increment(16)
-
- minMaxCounterWithSettings.increment(43)
- minMaxCounterWithSettings.decrement()
-
- gauge.record(15)
-
- Kamon(Metrics).subscriptions ! Subscriptions.FlushMetrics
- val firstSnapshot = expectMsgType[TickMetricSnapshot].metrics
-
- firstSnapshot.keys should contain allOf (
- UserHistogram("histogram-with-settings"),
- UserHistogram("histogram-with-default-configuration"))
-
- firstSnapshot(UserHistogram("histogram-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (10)
- firstSnapshot(UserHistogram("histogram-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (20)
- firstSnapshot(UserHistogram("histogram-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(101)
- firstSnapshot(UserHistogram("histogram-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream should contain allOf (
- MutableRecord(10, 1),
- MutableRecord(20, 100))
-
- firstSnapshot(UserHistogram("histogram-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (40)
- firstSnapshot(UserHistogram("histogram-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (40)
- firstSnapshot(UserHistogram("histogram-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(1)
- firstSnapshot(UserHistogram("histogram-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream should contain only (
- MutableRecord(40, 1))
-
- firstSnapshot(UserCounter("counter")).metrics(Count).asInstanceOf[Counter.Snapshot].count should be(17)
-
- firstSnapshot(UserMinMaxCounter("min-max-counter-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (0)
- firstSnapshot(UserMinMaxCounter("min-max-counter-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (43)
- firstSnapshot(UserMinMaxCounter("min-max-counter-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(3)
- firstSnapshot(UserMinMaxCounter("min-max-counter-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream should contain allOf (
- MutableRecord(0, 1), // min
- MutableRecord(42, 1), // current
- MutableRecord(43, 1)) // max
-
- firstSnapshot(UserMinMaxCounter("min-max-counter-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (0)
- firstSnapshot(UserMinMaxCounter("min-max-counter-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (0)
- firstSnapshot(UserMinMaxCounter("min-max-counter-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(3)
- firstSnapshot(UserMinMaxCounter("min-max-counter-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream should contain only (
- MutableRecord(0, 3)) // min, max and current
-
- firstSnapshot(UserGauge("gauge-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (15)
- firstSnapshot(UserGauge("gauge-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (15)
- firstSnapshot(UserGauge("gauge-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(1)
- firstSnapshot(UserGauge("gauge-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream should contain only (
- MutableRecord(15, 1)) // only the manually recorded value
-
- Kamon(Metrics).subscriptions ! Subscriptions.FlushMetrics
- val secondSnapshot = expectMsgType[TickMetricSnapshot].metrics
-
- secondSnapshot.keys should contain allOf (
- UserHistogram("histogram-with-settings"),
- UserHistogram("histogram-with-default-configuration"))
-
- secondSnapshot(UserHistogram("histogram-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (0)
- secondSnapshot(UserHistogram("histogram-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (0)
- secondSnapshot(UserHistogram("histogram-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(0)
- secondSnapshot(UserHistogram("histogram-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream shouldBe empty
-
- secondSnapshot(UserHistogram("histogram-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (0)
- secondSnapshot(UserHistogram("histogram-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (0)
- secondSnapshot(UserHistogram("histogram-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(0)
- secondSnapshot(UserHistogram("histogram-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream shouldBe empty
-
- secondSnapshot(UserCounter("counter")).metrics(Count).asInstanceOf[Counter.Snapshot].count should be(0)
-
- secondSnapshot(UserMinMaxCounter("min-max-counter-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (42)
- secondSnapshot(UserMinMaxCounter("min-max-counter-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (42)
- secondSnapshot(UserMinMaxCounter("min-max-counter-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(3)
- secondSnapshot(UserMinMaxCounter("min-max-counter-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream should contain only (
- MutableRecord(42, 3)) // max
-
- secondSnapshot(UserMinMaxCounter("min-max-counter-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (0)
- secondSnapshot(UserMinMaxCounter("min-max-counter-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (0)
- secondSnapshot(UserMinMaxCounter("min-max-counter-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(3)
- secondSnapshot(UserMinMaxCounter("min-max-counter-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream should contain only (
- MutableRecord(0, 3)) // min, max and current
-
- secondSnapshot(UserGauge("gauge-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (0)
- secondSnapshot(UserGauge("gauge-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (0)
- secondSnapshot(UserGauge("gauge-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(0)
- secondSnapshot(UserGauge("gauge-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream shouldBe empty
-
- Kamon(Metrics).unsubscribe(testActor)
- }
-
- "generate a snapshot that can be merged with another" in {
- val buffer = system.actorOf(TickMetricSnapshotBuffer.props(1 hours, testActor))
- Kamon(Metrics).subscribe(UserHistograms, "*", buffer, permanently = true)
- Kamon(Metrics).subscribe(UserCounters, "*", buffer, permanently = true)
- Kamon(Metrics).subscribe(UserMinMaxCounters, "*", buffer, permanently = true)
- Kamon(Metrics).subscribe(UserGauges, "*", buffer, permanently = true)
-
- val histogram = Kamon(UserMetrics).registerHistogram("histogram-for-merge")
- val counter = Kamon(UserMetrics).registerCounter("counter-for-merge")
- val minMaxCounter = Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-for-merge")
- val gauge = Kamon(UserMetrics).registerGauge("gauge-for-merge") { () ⇒ 10L }
-
- histogram.record(100)
- counter.increment(10)
- minMaxCounter.increment(50)
- minMaxCounter.decrement(10)
- gauge.record(50)
-
- Kamon(Metrics).subscriptions ! Subscriptions.FlushMetrics
- Thread.sleep(2000) // Make sure that the snapshots are taken before proceeding
-
- val extraCounter = Kamon(UserMetrics).registerCounter("extra-counter")
- histogram.record(200)
- extraCounter.increment(20)
- minMaxCounter.increment(40)
- minMaxCounter.decrement(50)
- gauge.record(70)
-
- Kamon(Metrics).subscriptions ! Subscriptions.FlushMetrics
- Thread.sleep(2000) // Make sure that the metrics are buffered.
- buffer ! TickMetricSnapshotBuffer.FlushBuffer
- val snapshot = expectMsgType[TickMetricSnapshot].metrics
-
- snapshot.keys should contain(UserHistogram("histogram-for-merge"))
-
- snapshot(UserHistogram("histogram-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (100)
- snapshot(UserHistogram("histogram-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (200)
- snapshot(UserHistogram("histogram-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(2)
- snapshot(UserHistogram("histogram-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream should contain allOf (
- MutableRecord(100, 1),
- MutableRecord(200, 1))
-
- snapshot(UserCounter("counter-for-merge")).metrics(Count).asInstanceOf[Counter.Snapshot].count should be(10)
- snapshot(UserCounter("extra-counter")).metrics(Count).asInstanceOf[Counter.Snapshot].count should be(20)
-
- snapshot(UserMinMaxCounter("min-max-counter-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (0)
- snapshot(UserMinMaxCounter("min-max-counter-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (80)
- snapshot(UserMinMaxCounter("min-max-counter-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(6)
- snapshot(UserMinMaxCounter("min-max-counter-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream should contain allOf (
- MutableRecord(0, 1), // min in first snapshot
- MutableRecord(30, 2), // min and current in second snapshot
- MutableRecord(40, 1), // current in first snapshot
- MutableRecord(50, 1), // max in first snapshot
- MutableRecord(80, 1)) // max in second snapshot
-
- snapshot(UserGauge("gauge-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (50)
- snapshot(UserGauge("gauge-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (70)
- snapshot(UserGauge("gauge-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(2)
- snapshot(UserGauge("gauge-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream should contain allOf (
- MutableRecord(50, 1),
- MutableRecord(70, 1))
-
- Kamon(Metrics).unsubscribe(testActor)
+ val counter = Kamon.userMetrics.counter("counter-for-remove")
+ val histogram = Kamon.userMetrics.histogram("histogram-for-remove")
+ val minMaxCounter = Kamon.userMetrics.minMaxCounter("min-max-counter-for-remove")
+ val gauge = Kamon.userMetrics.gauge("gauge-for-remove", { () ⇒ 2L })
+
+ Kamon.userMetrics.removeCounter("counter-for-remove")
+ Kamon.userMetrics.removeHistogram("histogram-for-remove")
+ Kamon.userMetrics.removeMinMaxCounter("min-max-counter-for-remove")
+ Kamon.userMetrics.removeGauge("gauge-for-remove")
+
+ counter should not be (theSameInstanceAs(Kamon.userMetrics.counter("counter-for-remove")))
+ histogram should not be (theSameInstanceAs(Kamon.userMetrics.histogram("histogram-for-remove")))
+ minMaxCounter should not be (theSameInstanceAs(Kamon.userMetrics.minMaxCounter("min-max-counter-for-remove")))
+ gauge should not be (theSameInstanceAs(Kamon.userMetrics.gauge("gauge-for-remove", { () ⇒ 2L })))
}
}
}
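The change running through this spec is the registration API: the old Kamon(UserMetrics).registerX(name, precision, highestTrackableValue) calls become Kamon.userMetrics.x(name, DynamicRange(...)). A before/after sketch, with the DynamicRange arguments inferred from the replaced pairs above (lowest discernible value, highest trackable value, significant value digits):

    // 0.3.x style, as removed above
    val histogram = Kamon(UserMetrics).registerHistogram("my-histogram", Histogram.Precision.Normal, 10000L)

    // this patch
    val histogram = Kamon.userMetrics.histogram("my-histogram", DynamicRange(1, 10000, 2))

Removal is now observable only through identity, which is exactly what the rewritten "allow un-registering user metrics" test asserts: removing and re-registering under the same name yields a new instance.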
diff --git a/kamon-core/src/test/scala/kamon/metric/instrument/CounterSpec.scala b/kamon-core/src/test/scala/kamon/metric/instrument/CounterSpec.scala
index 1a93e1f6..094baf4c 100644
--- a/kamon-core/src/test/scala/kamon/metric/instrument/CounterSpec.scala
+++ b/kamon-core/src/test/scala/kamon/metric/instrument/CounterSpec.scala
@@ -1,8 +1,23 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
package kamon.metric.instrument
import java.nio.LongBuffer
-import kamon.metric.CollectionContext
import org.scalatest.{ Matchers, WordSpec }
class CounterSpec extends WordSpec with Matchers {
diff --git a/kamon-core/src/test/scala/kamon/metric/instrument/GaugeSpec.scala b/kamon-core/src/test/scala/kamon/metric/instrument/GaugeSpec.scala
index 9192d999..ec07d66c 100644
--- a/kamon-core/src/test/scala/kamon/metric/instrument/GaugeSpec.scala
+++ b/kamon-core/src/test/scala/kamon/metric/instrument/GaugeSpec.scala
@@ -1,72 +1,79 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
package kamon.metric.instrument
import java.util.concurrent.atomic.AtomicLong
-
-import akka.actor.ActorSystem
-import com.typesafe.config.ConfigFactory
import kamon.Kamon
-import kamon.metric.{ Metrics, Scale, CollectionContext }
-import org.scalatest.{ Matchers, WordSpecLike }
+import kamon.metric.instrument.Histogram.DynamicRange
+import kamon.testkit.BaseKamonSpec
import scala.concurrent.duration._
-class GaugeSpec extends WordSpecLike with Matchers {
- implicit val system = ActorSystem("gauge-spec", ConfigFactory.parseString(
- """
- |kamon.metrics {
- | flush-interval = 1 hour
- | default-collection-context-buffer-size = 10
- | precision {
- | default-gauge-precision {
- | refresh-interval = 100 milliseconds
- | highest-trackable-value = 999999999
- | significant-value-digits = 2
- | }
- | }
- |}
- """.stripMargin))
+class GaugeSpec extends BaseKamonSpec("gauge-spec") {
"a Gauge" should {
- "automatically record the current value using the configured refresh-interval" in {
- val numberOfValuesRecorded = new AtomicLong(0)
- val gauge = Gauge.fromDefaultConfig(system) { () ⇒ numberOfValuesRecorded.addAndGet(1) }
-
+ "automatically record the current value using the configured refresh-interval" in new GaugeFixture {
+ val (numberOfValuesRecorded, gauge) = createGauge()
Thread.sleep(1.second.toMillis)
+
numberOfValuesRecorded.get() should be(10L +- 1L)
gauge.cleanup
}
- "stop automatically recording after a call to cleanup" in {
- val numberOfValuesRecorded = new AtomicLong(0)
- val gauge = Gauge.fromDefaultConfig(system) { () ⇒ numberOfValuesRecorded.addAndGet(1) }
-
+ "stop automatically recording after a call to cleanup" in new GaugeFixture {
+ val (numberOfValuesRecorded, gauge) = createGauge()
Thread.sleep(1.second.toMillis)
+
gauge.cleanup
numberOfValuesRecorded.get() should be(10L +- 1L)
Thread.sleep(1.second.toMillis)
+
numberOfValuesRecorded.get() should be(10L +- 1L)
}
- "produce a Histogram snapshot including all the recorded values" in {
- val numberOfValuesRecorded = new AtomicLong(0)
- val gauge = Gauge.fromDefaultConfig(system) { () ⇒ numberOfValuesRecorded.addAndGet(1) }
+ "produce a Histogram snapshot including all the recorded values" in new GaugeFixture {
+ val (numberOfValuesRecorded, gauge) = createGauge()
Thread.sleep(1.second.toMillis)
gauge.cleanup
- val snapshot = gauge.collect(Kamon(Metrics).buildDefaultCollectionContext)
+ val snapshot = gauge.collect(Kamon.metrics.buildDefaultCollectionContext)
snapshot.numberOfMeasurements should be(10L +- 1L)
snapshot.min should be(1)
snapshot.max should be(10L +- 1L)
}
- "not record the current value when doing a collection" in {
- val numberOfValuesRecorded = new AtomicLong(0)
- val gauge = Gauge(Histogram.Precision.Normal, 10000L, Scale.Unit, 1 hour, system)(() ⇒ numberOfValuesRecorded.addAndGet(1))
-
- val snapshot = gauge.collect(Kamon(Metrics).buildDefaultCollectionContext)
+ "not record the current value when doing a collection" in new GaugeFixture {
+ val (numberOfValuesRecorded, gauge) = createGauge(10 seconds)
+ val snapshot = gauge.collect(Kamon.metrics.buildDefaultCollectionContext)
snapshot.numberOfMeasurements should be(0)
numberOfValuesRecorded.get() should be(0)
}
}
+
+ trait GaugeFixture {
+ def createGauge(refreshInterval: FiniteDuration = 100 millis): (AtomicLong, Gauge) = {
+ val recordedValuesCounter = new AtomicLong(0)
+ val gauge = Gauge(DynamicRange(1, 100, 2), refreshInterval, Kamon.metrics.settings.refreshScheduler, {
+ () ⇒ recordedValuesCounter.addAndGet(1)
+ })
+
+ (recordedValuesCounter, gauge)
+ }
+
+ }
}
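The fixture pins down the new Gauge factory shape: range, refresh interval, the shared refresh scheduler and the value collector are all explicit. A direct-construction sketch under the same assumptions, with an arbitrary collector:

    val gauge = Gauge(DynamicRange(1, 100, 2), 100 millis, Kamon.metrics.settings.refreshScheduler, { () ⇒ 42L })
    // the collector runs every 100 ms until:
    gauge.cleanup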
diff --git a/kamon-core/src/test/scala/kamon/metric/instrument/HistogramSpec.scala b/kamon-core/src/test/scala/kamon/metric/instrument/HistogramSpec.scala
index c3060d4a..9a50e149 100644
--- a/kamon-core/src/test/scala/kamon/metric/instrument/HistogramSpec.scala
+++ b/kamon-core/src/test/scala/kamon/metric/instrument/HistogramSpec.scala
@@ -18,22 +18,13 @@ package kamon.metric.instrument
import java.nio.LongBuffer
-import com.typesafe.config.ConfigFactory
-import kamon.metric.CollectionContext
+import kamon.metric.instrument.Histogram.DynamicRange
import org.scalatest.{ Matchers, WordSpec }
import scala.util.Random
class HistogramSpec extends WordSpec with Matchers {
- val histogramConfig = ConfigFactory.parseString(
- """
- |
- |highest-trackable-value = 100000
- |significant-value-digits = 2
- |
- """.stripMargin)
-
"a Histogram" should {
"allow record values within the configured range" in new HistogramFixture {
histogram.record(1000)
@@ -109,7 +100,7 @@ class HistogramSpec extends WordSpec with Matchers {
val buffer: LongBuffer = LongBuffer.allocate(10000)
}
- val histogram = Histogram.fromConfig(histogramConfig)
+ val histogram = Histogram(DynamicRange(1, 100000, 2))
def takeSnapshot(): Histogram.Snapshot = histogram.collect(collectionContext)
}
@@ -119,17 +110,20 @@ class HistogramSpec extends WordSpec with Matchers {
val buffer: LongBuffer = LongBuffer.allocate(10000)
}
- val controlHistogram = Histogram.fromConfig(histogramConfig)
- val histogramA = Histogram.fromConfig(histogramConfig)
- val histogramB = Histogram.fromConfig(histogramConfig)
+ val controlHistogram = Histogram(DynamicRange(1, 100000, 2))
+ val histogramA = Histogram(DynamicRange(1, 100000, 2))
+ val histogramB = Histogram(DynamicRange(1, 100000, 2))
+
+ def takeSnapshotFrom(histogram: Histogram): InstrumentSnapshot = histogram.collect(collectionContext)
- def takeSnapshotFrom(histogram: Histogram): Histogram.Snapshot = histogram.collect(collectionContext)
+ def assertEquals(left: InstrumentSnapshot, right: InstrumentSnapshot): Unit = {
+ val leftSnapshot = left.asInstanceOf[Histogram.Snapshot]
+ val rightSnapshot = right.asInstanceOf[Histogram.Snapshot]
- def assertEquals(left: Histogram.Snapshot, right: Histogram.Snapshot): Unit = {
- left.numberOfMeasurements should equal(right.numberOfMeasurements)
- left.min should equal(right.min)
- left.max should equal(right.max)
- left.recordsIterator.toStream should contain theSameElementsAs (right.recordsIterator.toStream)
+ leftSnapshot.numberOfMeasurements should equal(rightSnapshot.numberOfMeasurements)
+ leftSnapshot.min should equal(rightSnapshot.min)
+ leftSnapshot.max should equal(rightSnapshot.max)
+ leftSnapshot.recordsIterator.toStream should contain theSameElementsAs (rightSnapshot.recordsIterator.toStream)
}
}
}
diff --git a/kamon-core/src/test/scala/kamon/metric/instrument/MinMaxCounterSpec.scala b/kamon-core/src/test/scala/kamon/metric/instrument/MinMaxCounterSpec.scala
index 2c11adc3..7acfc229 100644
--- a/kamon-core/src/test/scala/kamon/metric/instrument/MinMaxCounterSpec.scala
+++ b/kamon-core/src/test/scala/kamon/metric/instrument/MinMaxCounterSpec.scala
@@ -19,19 +19,12 @@ import java.nio.LongBuffer
import akka.actor._
import akka.testkit.TestProbe
-import com.typesafe.config.ConfigFactory
-import kamon.metric.CollectionContext
-import kamon.metric.instrument.Histogram.MutableRecord
-import org.scalatest.{ Matchers, WordSpecLike }
-
-class MinMaxCounterSpec extends WordSpecLike with Matchers {
- implicit val system = ActorSystem("min-max-counter-spec")
- val minMaxCounterConfig = ConfigFactory.parseString(
- """
- |refresh-interval = 1 hour
- |highest-trackable-value = 1000
- |significant-value-digits = 2
- """.stripMargin)
+import kamon.Kamon
+import kamon.metric.instrument.Histogram.{ DynamicRange, MutableRecord }
+import kamon.testkit.BaseKamonSpec
+import scala.concurrent.duration._
+
+class MinMaxCounterSpec extends BaseKamonSpec("min-max-counter-spec") {
"the MinMaxCounter" should {
"track ascending tendencies" in new MinMaxCounterFixture {
@@ -104,7 +97,7 @@ class MinMaxCounterSpec extends WordSpecLike with Matchers {
workers foreach (_ ! "increment")
for (refresh ← 1 to 1000) {
collectCounterSnapshot()
- Thread.sleep(10)
+ Thread.sleep(1)
}
monitor.expectNoMsg()
@@ -117,7 +110,7 @@ class MinMaxCounterSpec extends WordSpecLike with Matchers {
val buffer: LongBuffer = LongBuffer.allocate(64)
}
- val mmCounter = MinMaxCounter.fromConfig(minMaxCounterConfig, system).asInstanceOf[PaddedMinMaxCounter]
+ val mmCounter = MinMaxCounter(DynamicRange(1, 1000, 2), 1 hour, Kamon.metrics.settings.refreshScheduler)
mmCounter.cleanup // cancel the refresh schedule
def collectCounterSnapshot(): Histogram.Snapshot = mmCounter.collect(collectionContext)
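MinMaxCounter construction follows the same pattern, taking the range, refresh interval and scheduler explicitly. A sketch mirroring the fixture above, using its collectionContext:

    val mmCounter = MinMaxCounter(DynamicRange(1, 1000, 2), 1 hour, Kamon.metrics.settings.refreshScheduler)
    mmCounter.increment(5)
    mmCounter.decrement()
    val snapshot = mmCounter.collect(collectionContext) // captures min, max and current value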
diff --git a/kamon-core/src/test/scala/kamon/testkit/BaseKamonSpec.scala b/kamon-core/src/test/scala/kamon/testkit/BaseKamonSpec.scala
new file mode 100644
index 00000000..357f997f
--- /dev/null
+++ b/kamon-core/src/test/scala/kamon/testkit/BaseKamonSpec.scala
@@ -0,0 +1,62 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.testkit
+
+import akka.testkit.{ ImplicitSender, TestKitBase }
+import akka.actor.ActorSystem
+import com.typesafe.config.{ Config, ConfigFactory }
+import kamon.Kamon
+import kamon.metric.{ SubscriptionsDispatcher, EntitySnapshot, MetricsExtensionImpl }
+import kamon.trace.TraceContext
+import kamon.util.LazyActorRef
+import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpecLike }
+
+import scala.reflect.ClassTag
+
+abstract class BaseKamonSpec(actorSystemName: String) extends TestKitBase with WordSpecLike with Matchers with BeforeAndAfterAll {
+ lazy val collectionContext = Kamon.metrics.buildDefaultCollectionContext
+ implicit lazy val system: ActorSystem = {
+ Kamon.start(config.withFallback(ConfigFactory.load()))
+ ActorSystem(actorSystemName, config)
+ }
+
+ implicit def self = testActor
+
+ def config: Config =
+ ConfigFactory.empty()
+
+ def newContext(name: String): TraceContext =
+ Kamon.tracer.newContext(name)
+
+ def newContext(name: String, token: String): TraceContext =
+ Kamon.tracer.newContext(name, token)
+
+ def takeSnapshotOf(name: String, category: String): EntitySnapshot = {
+ val recorder = Kamon.metrics.find(name, category).get
+ recorder.collect(collectionContext)
+ }
+
+ def flushSubscriptions(): Unit = {
+ val subscriptionsField = Kamon.metrics.getClass.getDeclaredField("_subscriptions")
+ subscriptionsField.setAccessible(true)
+ val subscriptions = subscriptionsField.get(Kamon.metrics).asInstanceOf[LazyActorRef]
+
+ subscriptions.tell(SubscriptionsDispatcher.Tick)
+ }
+
+ override protected def afterAll(): Unit = system.shutdown()
+}
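BaseKamonSpec is what lets every spec in this patch drop its hand-rolled ActorSystem and configuration boilerplate. A minimal subclass, using only members defined above (the spec name and config value are illustrative):

    import com.typesafe.config.ConfigFactory
    import kamon.testkit.BaseKamonSpec

    class MyMetricSpec extends BaseKamonSpec("my-metric-spec") {
      override lazy val config =
        ConfigFactory.parseString("kamon.metric.tick-interval = 1 hour")

      "my component" should {
        "record something" in {
          val context = newContext("my-trace")
          context.finish()
          flushSubscriptions()
        }
      }
    }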
diff --git a/kamon-core/src/test/scala/kamon/trace/SimpleTraceSpec.scala b/kamon-core/src/test/scala/kamon/trace/SimpleTraceSpec.scala
new file mode 100644
index 00000000..1d270106
--- /dev/null
+++ b/kamon-core/src/test/scala/kamon/trace/SimpleTraceSpec.scala
@@ -0,0 +1,85 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.trace
+
+import com.typesafe.config.ConfigFactory
+import kamon.Kamon
+import kamon.testkit.BaseKamonSpec
+import scala.concurrent.duration._
+
+class SimpleTraceSpec extends BaseKamonSpec("simple-trace-spec") {
+
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon {
+ | metric {
+ | tick-interval = 1 hour
+ | }
+ |
+ | trace {
+ | level-of-detail = simple-trace
+ | sampling = all
+ | }
+ |}
+ """.stripMargin)
+
+ "the simple tracing" should {
+ "send a TraceInfo when the trace has finished and all segments are finished" in {
+ Kamon.tracer.subscribe(testActor)
+
+ TraceContext.withContext(newContext("simple-trace-without-segments")) {
+ TraceContext.currentContext.startSegment("segment-one", "test-segment", "test").finish()
+ TraceContext.currentContext.startSegment("segment-two", "test-segment", "test").finish()
+ TraceContext.currentContext.finish()
+ }
+
+ val traceInfo = expectMsgType[TraceInfo]
+ Kamon.tracer.unsubscribe(testActor)
+
+ traceInfo.name should be("simple-trace-without-segments")
+ traceInfo.segments.size should be(2)
+ traceInfo.segments.find(_.name == "segment-one") should be('defined)
+ traceInfo.segments.find(_.name == "segment-two") should be('defined)
+ }
+
+ "incubate the tracing context if there are open segments after finishing" in {
+ Kamon.tracer.subscribe(testActor)
+
+ val secondSegment = TraceContext.withContext(newContext("simple-trace-without-segments")) {
+ TraceContext.currentContext.startSegment("segment-one", "test-segment", "test").finish()
+ val segment = TraceContext.currentContext.startSegment("segment-two", "test-segment", "test")
+ TraceContext.currentContext.finish()
+ segment
+ }
+
+ expectNoMsg(2 seconds)
+ secondSegment.finish()
+
+ within(10 seconds) {
+ val traceInfo = expectMsgType[TraceInfo]
+ Kamon.tracer.unsubscribe(testActor)
+
+ traceInfo.name should be("simple-trace-without-segments")
+ traceInfo.segments.size should be(2)
+ traceInfo.segments.find(_.name == "segment-one") should be('defined)
+ traceInfo.segments.find(_.name == "segment-two") should be('defined)
+ }
+ }
+
+ }
+}
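The subscribe/expectMsgType pair above is the whole testing protocol for simple traces: every finished trace is delivered to tracer subscribers as a TraceInfo message. Reduced to its core, using only calls exercised in this spec:

    Kamon.tracer.subscribe(testActor)
    TraceContext.withContext(newContext("my-trace")) {
      TraceContext.currentContext.finish()
    }
    val traceInfo = expectMsgType[TraceInfo]
    Kamon.tracer.unsubscribe(testActor)
    traceInfo.name should be("my-trace")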
diff --git a/kamon-core/src/test/scala/kamon/trace/TraceContextManipulationSpec.scala b/kamon-core/src/test/scala/kamon/trace/TraceContextManipulationSpec.scala
index 838a1b98..d7cb7ea3 100644
--- a/kamon-core/src/test/scala/kamon/trace/TraceContextManipulationSpec.scala
+++ b/kamon-core/src/test/scala/kamon/trace/TraceContextManipulationSpec.scala
@@ -1,95 +1,96 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
package kamon.trace
-import akka.actor.ActorSystem
-import akka.testkit.TestKitBase
import com.typesafe.config.ConfigFactory
-import org.scalatest.{ Matchers, WordSpecLike }
-
-class TraceContextManipulationSpec extends TestKitBase with WordSpecLike with Matchers {
- implicit def self = testActor
- implicit lazy val system: ActorSystem = ActorSystem("trace-metrics-spec", ConfigFactory.parseString(
- """
- |kamon.metrics {
- | tick-interval = 1 hour
- | filters = [
- | {
- | trace {
- | includes = [ "*" ]
- | excludes = [ "non-tracked-trace"]
- | }
- | }
- | ]
- | precision {
- | default-histogram-precision {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- |
- | default-min-max-counter-precision {
- | refresh-interval = 1 second
- | highest-trackable-value = 999999999
- | significant-value-digits = 2
- | }
- | }
- |}
- """.stripMargin))
-
- "the TraceRecorder api" should {
+import kamon.testkit.BaseKamonSpec
+
+class TraceContextManipulationSpec extends BaseKamonSpec("trace-metrics-spec") {
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon.metric {
+ | tick-interval = 1 hour
+ |
+ | filters {
+ | trace {
+ | includes = [ "*" ]
+ | excludes = [ "non-tracked-trace"]
+ | }
+ | }
+ |}
+ """.stripMargin)
+
+ "the TraceContext api" should {
"allow starting a trace within a specified block of code, and only within that block of code" in {
- val createdContext = TraceRecorder.withNewTraceContext("start-context") {
- TraceRecorder.currentContext should not be empty
- TraceRecorder.currentContext
+ val createdContext = TraceContext.withContext(newContext("start-context")) {
+ TraceContext.currentContext should not be empty
+ TraceContext.currentContext
}
- TraceRecorder.currentContext shouldBe empty
+ TraceContext.currentContext shouldBe empty
createdContext.name shouldBe ("start-context")
}
"allow starting a trace within a specified block of code, providing a trace-token and only within that block of code" in {
- val createdContext = TraceRecorder.withNewTraceContext("start-context-with-token", Some("token-1")) {
- TraceRecorder.currentContext should not be empty
- TraceRecorder.currentContext
+ val createdContext = TraceContext.withContext(newContext("start-context-with-token", "token-1")) {
+ TraceContext.currentContext should not be empty
+ TraceContext.currentContext
}
- TraceRecorder.currentContext shouldBe empty
+ TraceContext.currentContext shouldBe empty
createdContext.name shouldBe ("start-context-with-token")
createdContext.token should be("token-1")
}
"allow providing a TraceContext and make it available within a block of code" in {
- val createdContext = TraceRecorder.withNewTraceContext("manually-provided-trace-context") { TraceRecorder.currentContext }
+ val createdContext = newContext("manually-provided-trace-context")
- TraceRecorder.currentContext shouldBe empty
- TraceRecorder.withTraceContext(createdContext) {
- TraceRecorder.currentContext should be(createdContext)
+ TraceContext.currentContext shouldBe empty
+ TraceContext.withContext(createdContext) {
+ TraceContext.currentContext should be(createdContext)
}
- TraceRecorder.currentContext shouldBe empty
+ TraceContext.currentContext shouldBe empty
}
"allow renaming a trace" in {
- val createdContext = TraceRecorder.withNewTraceContext("trace-before-rename") {
- TraceRecorder.rename("renamed-trace")
- TraceRecorder.currentContext
+ val createdContext = TraceContext.withContext(newContext("trace-before-rename")) {
+ TraceContext.currentContext.rename("renamed-trace")
+ TraceContext.currentContext
}
- TraceRecorder.currentContext shouldBe empty
+ TraceContext.currentContext shouldBe empty
createdContext.name shouldBe ("renamed-trace")
}
"allow creating a segment within a trace" in {
- val createdContext = TraceRecorder.withNewTraceContext("trace-with-segments") {
- val segment = TraceRecorder.currentContext.startSegment("segment-1", "segment-1-category", "segment-library")
- TraceRecorder.currentContext
+ val createdContext = TraceContext.withContext(newContext("trace-with-segments")) {
+ val segment = TraceContext.currentContext.startSegment("segment-1", "segment-1-category", "segment-library")
+ TraceContext.currentContext
}
- TraceRecorder.currentContext shouldBe empty
+ TraceContext.currentContext shouldBe empty
createdContext.name shouldBe ("trace-with-segments")
}
"allow renaming a segment" in {
- TraceRecorder.withNewTraceContext("trace-with-renamed-segment") {
- val segment = TraceRecorder.currentContext.startSegment("original-segment-name", "segment-label", "segment-library")
+ TraceContext.withContext(newContext("trace-with-renamed-segment")) {
+ val segment = TraceContext.currentContext.startSegment("original-segment-name", "segment-label", "segment-library")
segment.name should be("original-segment-name")
segment.rename("new-segment-name")
diff --git a/kamon-core/src/test/scala/kamon/trace/TraceLocalSpec.scala b/kamon-core/src/test/scala/kamon/trace/TraceLocalSpec.scala
index 927573c2..8bacca83 100644
--- a/kamon-core/src/test/scala/kamon/trace/TraceLocalSpec.scala
+++ b/kamon-core/src/test/scala/kamon/trace/TraceLocalSpec.scala
@@ -16,19 +16,21 @@
package kamon.trace
-import akka.testkit.TestKit
-import akka.actor.ActorSystem
-import org.scalatest.{ OptionValues, Matchers, WordSpecLike }
+import kamon.testkit.BaseKamonSpec
+import kamon.trace.TraceLocal.AvailableToMdc
+import kamon.trace.logging.MdcKeysSupport
import org.scalatest.concurrent.PatienceConfiguration
+import org.scalatest.OptionValues
+import org.slf4j.MDC
-class TraceLocalSpec extends TestKit(ActorSystem("trace-local-spec")) with WordSpecLike with Matchers
- with PatienceConfiguration with OptionValues {
+class TraceLocalSpec extends BaseKamonSpec("trace-local-spec") with PatienceConfiguration with OptionValues with MdcKeysSupport {
+ val SampleTraceLocalKeyAvailableToMDC = AvailableToMdc("someKey")
object SampleTraceLocalKey extends TraceLocal.TraceLocalKey { type ValueType = String }
"the TraceLocal storage" should {
"allow storing and retrieving values" in {
- TraceRecorder.withNewTraceContext("store-and-retrieve-trace-local") {
+ TraceContext.withContext(newContext("store-and-retrieve-trace-local")) {
val testString = "Hello World"
TraceLocal.store(SampleTraceLocalKey)(testString)
@@ -37,7 +39,7 @@ class TraceLocalSpec extends TestKit(ActorSystem("trace-local-spec")) with WordS
}
"return None when retrieving a non existent key" in {
- TraceRecorder.withNewTraceContext("non-existent-key") {
+ TraceContext.withContext(newContext("non-existent-key")) {
TraceLocal.retrieve(SampleTraceLocalKey) should equal(None)
}
}
@@ -48,18 +50,44 @@ class TraceLocalSpec extends TestKit(ActorSystem("trace-local-spec")) with WordS
"be attached to the TraceContext when it is propagated" in {
val testString = "Hello World"
- val testContext = TraceRecorder.withNewTraceContext("manually-propagated-trace-local") {
+ val testContext = TraceContext.withContext(newContext("manually-propagated-trace-local")) {
TraceLocal.store(SampleTraceLocalKey)(testString)
TraceLocal.retrieve(SampleTraceLocalKey).value should equal(testString)
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
/** No TraceLocal should be available here */
TraceLocal.retrieve(SampleTraceLocalKey) should equal(None)
- TraceRecorder.withTraceContext(testContext) {
+ TraceContext.withContext(testContext) {
TraceLocal.retrieve(SampleTraceLocalKey).value should equal(testString)
}
}
+
+ "allow retrieve a value from the MDC when was created a key with AvailableToMdc(cool-key)" in {
+ TraceContext.withContext(newContext("store-and-retrieve-trace-local-and-copy-to-mdc")) {
+ val testString = "Hello MDC"
+
+ TraceLocal.store(SampleTraceLocalKeyAvailableToMDC)(testString)
+ TraceLocal.retrieve(SampleTraceLocalKeyAvailableToMDC).value should equal(testString)
+
+ withMdc {
+ MDC.get("someKey") should equal(testString)
+ }
+ }
+ }
+
+ "allow retrieve a value from the MDC when was created a key with AvailableToMdc.storeForMdc(String, String)" in {
+ TraceContext.withContext(newContext("store-and-retrieve-trace-local-and-copy-to-mdc")) {
+ val testString = "Hello MDC"
+
+ TraceLocal.storeForMdc("someKey", testString)
+ TraceLocal.retrieve(SampleTraceLocalKeyAvailableToMDC).value should equal(testString)
+
+ withMdc {
+ MDC.get("someKey") should equal(testString)
+ }
+ }
+ }
}
}
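The two MDC tests above describe the intended production flow: values stored under an AvailableToMdc key become visible to the logging backend inside a withMdc block. A sketch of that flow outside the spec (the key name is illustrative):

    TraceContext.withContext(newContext("handle-request")) {
      TraceLocal.storeForMdc("requestId", "abc-123")
      withMdc {
        // here MDC.get("requestId") returns "abc-123", so a logging
        // pattern such as %X{requestId} can include it in log lines
      }
    }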
diff --git a/kamon-core/src/test/scala/kamon/util/GlobPathFilterSpec.scala b/kamon-core/src/test/scala/kamon/util/GlobPathFilterSpec.scala
new file mode 100644
index 00000000..ab98d0ac
--- /dev/null
+++ b/kamon-core/src/test/scala/kamon/util/GlobPathFilterSpec.scala
@@ -0,0 +1,73 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.util
+
+import org.scalatest.{ Matchers, WordSpecLike }
+
+class GlobPathFilterSpec extends WordSpecLike with Matchers {
+ "The GlobPathFilter" should {
+
+ "match a single expression" in {
+ val filter = new GlobPathFilter("/user/actor")
+
+ filter.accept("/user/actor") shouldBe true
+
+ filter.accept("/user/actor/something") shouldBe false
+ filter.accept("/user/actor/somethingElse") shouldBe false
+ }
+
+ "match all expressions in the same level" in {
+ val filter = new GlobPathFilter("/user/*")
+
+ filter.accept("/user/actor") shouldBe true
+ filter.accept("/user/otherActor") shouldBe true
+
+ filter.accept("/user/something/actor") shouldBe false
+ filter.accept("/user/something/otherActor") shouldBe false
+ }
+
+ "match all expressions in the same levelss" in {
+ val filter = new GlobPathFilter("**")
+
+ filter.accept("GET: /ping") shouldBe true
+ filter.accept("GET: /ping/pong") shouldBe true
+ }
+
+ "match all expressions and crosses the path boundaries" in {
+ val filter = new GlobPathFilter("/user/actor-**")
+
+ filter.accept("/user/actor-") shouldBe true
+ filter.accept("/user/actor-one") shouldBe true
+ filter.accept("/user/actor-one/other") shouldBe true
+
+ filter.accept("/user/something/actor") shouldBe false
+ filter.accept("/user/something/otherActor") shouldBe false
+ }
+
+ "match exactly one character" in {
+ val filter = new GlobPathFilter("/user/actor-?")
+
+ filter.accept("/user/actor-1") shouldBe true
+ filter.accept("/user/actor-2") shouldBe true
+ filter.accept("/user/actor-3") shouldBe true
+
+ filter.accept("/user/actor-one") shouldBe false
+ filter.accept("/user/actor-two") shouldBe false
+ filter.accept("/user/actor-tree") shouldBe false
+ }
+ }
+}
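Taken together, the cases above pin down the glob dialect: * matches within a single path level, ** also crosses level boundaries, and ? matches exactly one character. Restated as one-liners grounded in the assertions above:

    new GlobPathFilter("/user/*").accept("/user/a/b")                     // false: * stops at a level boundary
    new GlobPathFilter("/user/actor-**").accept("/user/actor-one/other") // true: ** crosses levels
    new GlobPathFilter("/user/actor-?").accept("/user/actor-one")        // false: ? is exactly one character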
diff --git a/kamon-datadog/src/main/resources/reference.conf b/kamon-datadog/src/main/resources/reference.conf
index de318820..07a5c8e4 100644
--- a/kamon-datadog/src/main/resources/reference.conf
+++ b/kamon-datadog/src/main/resources/reference.conf
@@ -1,6 +1,6 @@
-# ==================================== #
+# ===================================== #
# Kamon-Datadog Reference Configuration #
-# ==================================== #
+# ===================================== #
kamon {
datadog {
@@ -11,18 +11,19 @@ kamon {
# Interval between metrics data flushes to Datadog. Its value must be equal to or greater than the
# kamon.metrics.tick-interval setting.
- flush-interval = 1 second
+ flush-interval = 10 seconds
# Max packet size for UDP metrics data sent to Datadog.
max-packet-size = 1024 bytes
# Subscription patterns used to select which metrics will be pushed to Datadog. Note that metrics
# collection for your desired entities must first be activated under the kamon.metrics.filters settings.
- includes {
- actor = [ "*" ]
- trace = [ "*" ]
- dispatcher = [ "*" ]
- router = [ "*" ]
+ subscriptions {
+ trace = [ "**" ]
+ actor = [ "**" ]
+ dispatcher = [ "**" ]
+ user-metric = [ "**" ]
+ system-metric = [ "**" ]
}
# Enable system metrics
@@ -34,4 +35,12 @@ kamon {
# application.entity-name.metric-name
application-name = "kamon"
}
-}
+
+ modules {
+ kamon-datadog {
+ auto-start = yes
+ requires-aspectj = no
+ extension-id = "kamon.datadog.Datadog"
+ }
+ }
+}
\ No newline at end of file
diff --git a/kamon-datadog/src/main/scala/kamon/datadog/Datadog.scala b/kamon-datadog/src/main/scala/kamon/datadog/Datadog.scala
index 6e29400f..152ba659 100644
--- a/kamon-datadog/src/main/scala/kamon/datadog/Datadog.scala
+++ b/kamon-datadog/src/main/scala/kamon/datadog/Datadog.scala
@@ -16,78 +16,45 @@
package kamon.datadog
+import java.net.InetSocketAddress
+import java.util.concurrent.TimeUnit.MILLISECONDS
+
import akka.actor._
+import akka.event.Logging
import kamon.Kamon
-import kamon.metric.UserMetrics.{ UserGauges, UserMinMaxCounters, UserCounters, UserHistograms }
+import kamon.util.ConfigTools.Syntax
import kamon.metric._
-import kamon.metrics._
-import kamon.metrics.CPUMetrics
-import scala.concurrent.duration._
+
import scala.collection.JavaConverters._
-import akka.event.Logging
-import java.net.InetSocketAddress
+import scala.concurrent.duration._
object Datadog extends ExtensionId[DatadogExtension] with ExtensionIdProvider {
override def lookup(): ExtensionId[_ <: Extension] = Datadog
override def createExtension(system: ExtendedActorSystem): DatadogExtension = new DatadogExtension(system)
-
- trait MetricKeyGenerator {
- def generateKey(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity): String
- }
}
class DatadogExtension(system: ExtendedActorSystem) extends Kamon.Extension {
+ implicit val as = system
val log = Logging(system, classOf[DatadogExtension])
log.info("Starting the Kamon(Datadog) extension")
private val datadogConfig = system.settings.config.getConfig("kamon.datadog")
val datadogHost = new InetSocketAddress(datadogConfig.getString("hostname"), datadogConfig.getInt("port"))
- val flushInterval = datadogConfig.getMilliseconds("flush-interval")
+ val flushInterval = datadogConfig.getFiniteDuration("flush-interval")
val maxPacketSizeInBytes = datadogConfig.getBytes("max-packet-size")
- val tickInterval = system.settings.config.getMilliseconds("kamon.metrics.tick-interval")
+ val tickInterval = Kamon.metrics.settings.tickInterval
val datadogMetricsListener = buildMetricsListener(tickInterval, flushInterval)
- // Subscribe to all user metrics
- Kamon(Metrics)(system).subscribe(UserHistograms, "*", datadogMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(UserCounters, "*", datadogMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(UserMinMaxCounters, "*", datadogMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(UserGauges, "*", datadogMetricsListener, permanently = true)
-
- // Subscribe to Actors
- val includedActors = datadogConfig.getStringList("includes.actor").asScala
- for (actorPathPattern ← includedActors) {
- Kamon(Metrics)(system).subscribe(ActorMetrics, actorPathPattern, datadogMetricsListener, permanently = true)
- }
-
- // Subscribe to Routers
- val includedRouters = datadogConfig.getStringList("includes.router").asScala
- for (routerPathPattern ← includedRouters) {
- Kamon(Metrics)(system).subscribe(RouterMetrics, routerPathPattern, datadogMetricsListener, permanently = true)
- }
-
- // Subscribe to Traces
- val includedTraces = datadogConfig.getStringList("includes.trace").asScala
- for (tracePathPattern ← includedTraces) {
- Kamon(Metrics)(system).subscribe(TraceMetrics, tracePathPattern, datadogMetricsListener, permanently = true)
- }
-
- // Subscribe to Dispatchers
- val includedDispatchers = datadogConfig.getStringList("includes.dispatcher").asScala
- for (dispatcherPathPattern ← includedDispatchers) {
- Kamon(Metrics)(system).subscribe(DispatcherMetrics, dispatcherPathPattern, datadogMetricsListener, permanently = true)
- }
-
- // Subscribe to SystemMetrics
- val includeSystemMetrics = datadogConfig.getBoolean("report-system-metrics")
- if (includeSystemMetrics) {
- List(CPUMetrics, ProcessCPUMetrics, MemoryMetrics, NetworkMetrics, GCMetrics, HeapMetrics, ContextSwitchesMetrics) foreach { metric ⇒
- Kamon(Metrics)(system).subscribe(metric, "*", datadogMetricsListener, permanently = true)
+ val subscriptions = datadogConfig.getConfig("subscriptions")
+ subscriptions.firstLevelKeys.map { subscriptionCategory ⇒
+ subscriptions.getStringList(subscriptionCategory).asScala.foreach { pattern ⇒
+ Kamon.metrics.subscribe(subscriptionCategory, pattern, datadogMetricsListener, permanently = true)
}
}
- def buildMetricsListener(tickInterval: Long, flushInterval: Long): ActorRef = {
+ def buildMetricsListener(tickInterval: FiniteDuration, flushInterval: FiniteDuration): ActorRef = {
assert(flushInterval >= tickInterval, "Datadog flush-interval needs to be equal to or greater than the tick-interval")
val metricsSender = system.actorOf(DatadogMetricsSender.props(datadogHost, maxPacketSizeInBytes), "datadog-metrics-sender")
@@ -95,7 +62,7 @@ class DatadogExtension(system: ExtendedActorSystem) extends Kamon.Extension {
// No need to buffer the metrics, let's go straight to the metrics sender.
metricsSender
} else {
- system.actorOf(TickMetricSnapshotBuffer.props(flushInterval.toInt.millis, metricsSender), "datadog-metrics-buffer")
+ system.actorOf(TickMetricSnapshotBuffer.props(flushInterval, metricsSender), "datadog-metrics-buffer")
}
}
}
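The subscription block replaces five hard-coded loops with one driven by the configuration keys themselves. For the reference configuration in this patch, the firstLevelKeys loop expands to calls of this shape:

    Kamon.metrics.subscribe("trace", "**", datadogMetricsListener, permanently = true)
    Kamon.metrics.subscribe("actor", "**", datadogMetricsListener, permanently = true)
    // ...and likewise for dispatcher, user-metric and system-metric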
diff --git a/kamon-datadog/src/main/scala/kamon/datadog/DatadogMetricsSender.scala b/kamon-datadog/src/main/scala/kamon/datadog/DatadogMetricsSender.scala
index 195798fe..80d4f098 100644
--- a/kamon-datadog/src/main/scala/kamon/datadog/DatadogMetricsSender.scala
+++ b/kamon-datadog/src/main/scala/kamon/datadog/DatadogMetricsSender.scala
@@ -20,11 +20,10 @@ import akka.actor.{ ActorSystem, Props, ActorRef, Actor }
import akka.io.{ Udp, IO }
import java.net.InetSocketAddress
import akka.util.ByteString
-import kamon.metric.Subscriptions.TickMetricSnapshot
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
import java.text.{ DecimalFormatSymbols, DecimalFormat }
-import kamon.metric.UserMetrics.UserMetricGroup
import kamon.metric.instrument.{ Counter, Histogram }
-import kamon.metric.{ MetricIdentity, MetricGroupIdentity }
+import kamon.metric.{ MetricKey, Entity }
import java.util.Locale
class DatadogMetricsSender(remote: InetSocketAddress, maxPacketSizeInBytes: Long) extends Actor with UdpExtensionProvider {
@@ -68,17 +67,19 @@ class DatadogMetricsSender(remote: InetSocketAddress, maxPacketSizeInBytes: Long
}
case cs: Counter.Snapshot ⇒
- val measurementData = formatMeasurement(groupIdentity, metricIdentity, encodeDatadogCounter(cs.count))
- packetBuilder.appendMeasurement(key, measurementData)
+ if (cs.count > 0) {
+ val measurementData = formatMeasurement(groupIdentity, metricIdentity, encodeDatadogCounter(cs.count))
+ packetBuilder.appendMeasurement(key, measurementData)
+ }
}
}
packetBuilder.flush()
}
- def formatMeasurement(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity, measurementData: String): String =
+ def formatMeasurement(entity: Entity, metricKey: MetricKey, measurementData: String): String =
StringBuilder.newBuilder
.append(measurementData)
- .append(buildIdentificationTag(groupIdentity, metricIdentity))
+ .append(buildIdentificationTag(entity, metricKey))
.result()
def encodeDatadogTimer(level: Long, count: Long): String = {
@@ -88,23 +89,12 @@ class DatadogMetricsSender(remote: InetSocketAddress, maxPacketSizeInBytes: Long
def encodeDatadogCounter(count: Long): String = count.toString + "|c"
- def buildMetricName(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity): String =
- if (isUserMetric(groupIdentity))
- s"$appName.${groupIdentity.category.name}.${groupIdentity.name}"
- else
- s"$appName.${groupIdentity.category.name}.${metricIdentity.name}"
-
- def buildIdentificationTag(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity): String = {
- if (isUserMetric(groupIdentity)) "" else {
- // Make the automatic HTTP trace names a bit more friendly
- val normalizedEntityName = groupIdentity.name.replace(": ", ":")
- s"|#${groupIdentity.category.name}:${normalizedEntityName}"
- }
- }
+ def buildMetricName(entity: Entity, metricKey: MetricKey): String =
+ s"$appName.${entity.category}.${metricKey.name}"
- def isUserMetric(groupIdentity: MetricGroupIdentity): Boolean = groupIdentity match {
- case someUserMetric: UserMetricGroup ⇒ true
- case everythingElse ⇒ false
+ def buildIdentificationTag(entity: Entity, metricKey: MetricKey): String = {
+ val normalizedEntityName = entity.name.replace(": ", ":")
+ s"|#${entity.category}:${normalizedEntityName}"
}
}
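The net effect of the sender changes is easiest to see on the wire: metrics go out in Datadog's StatsD-flavoured plain-text format, with the metric name built from app.category.metric-key, an optional @sampling-rate when one recorded value stands for several measurements, and the entity carried as a tag. A small self-contained sketch of that format, re-derived here for illustration (the real sender formats the sampling rate through a DecimalFormat and batches lines up to max-packet-size):

    object DatadogFormatSketch extends App {
      val appName = "kamon"

      // Histogram measurements: value|ms, plus @rate when count > 1.
      def timer(level: Long, count: Long): String = {
        val samplingRate = 1d / count
        level.toString + "|ms" + (if (samplingRate != 1d) "|@" + samplingRate else "")
      }

      // Counters: value|c.
      def counter(count: Long): String = count.toString + "|c"

      // appName.category.metric-key:data|#category:entity-name
      def measurement(category: String, entity: String, key: String, data: String): String =
        s"$appName.$category.$key:$data|#$category:${entity.replace(": ", ":")}"

      println(measurement("category", "datadog", "metric-one", timer(10, 1))) // kamon.category.metric-one:10|ms|#category:datadog
      println(measurement("category", "datadog", "metric-two", timer(10, 2))) // kamon.category.metric-two:10|ms|@0.5|#category:datadog
      println(measurement("category", "datadog", "counter", counter(4)))      // kamon.category.counter:4|c|#category:datadog
    }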
diff --git a/kamon-datadog/src/test/scala/kamon/datadog/DatadogMetricSenderSpec.scala b/kamon-datadog/src/test/scala/kamon/datadog/DatadogMetricSenderSpec.scala
index 713db30d..b35902f9 100644
--- a/kamon-datadog/src/test/scala/kamon/datadog/DatadogMetricSenderSpec.scala
+++ b/kamon-datadog/src/test/scala/kamon/datadog/DatadogMetricSenderSpec.scala
@@ -19,108 +19,98 @@ package kamon.datadog
import akka.testkit.{ TestKitBase, TestProbe }
import akka.actor.{ Props, ActorRef, ActorSystem }
import kamon.Kamon
-import kamon.metric.instrument.Histogram.Precision
-import kamon.metric.instrument.{ Counter, Histogram, HdrHistogram, LongAdderCounter }
+import kamon.metric.instrument._
+import kamon.testkit.BaseKamonSpec
+import kamon.util.MilliTimestamp
import org.scalatest.{ Matchers, WordSpecLike }
import kamon.metric._
import akka.io.Udp
-import kamon.metric.Subscriptions.TickMetricSnapshot
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
import java.lang.management.ManagementFactory
import java.net.InetSocketAddress
import com.typesafe.config.ConfigFactory
-class DatadogMetricSenderSpec extends TestKitBase with WordSpecLike with Matchers {
- implicit lazy val system: ActorSystem = ActorSystem("datadog-metric-sender-spec", ConfigFactory.parseString(
- """
- |kamon {
- | metrics {
- | disable-aspectj-weaver-missing-error = true
- | }
- |
- | datadog {
- | max-packet-size = 256 bytes
- | }
- |}
- |
- """.stripMargin))
-
- val collectionContext = Kamon(Metrics).buildDefaultCollectionContext
+class DatadogMetricSenderSpec extends BaseKamonSpec("datadog-metric-sender-spec") {
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon {
+ | metrics {
+ | disable-aspectj-weaver-missing-error = true
+ | }
+ |
+ | datadog {
+ | max-packet-size = 256 bytes
+ | }
+ |}
+ |
+ """.stripMargin)
"the DataDogMetricSender" should {
"send latency measurements" in new UdpListenerFixture {
- val testMetricName = "processing-time"
- val testRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
- testRecorder.record(10L)
+ val (entity, testRecorder) = buildRecorder("datadog")
+ testRecorder.metricOne.record(10L)
- val udp = setup(Map(testMetricName -> testRecorder.collect(collectionContext)))
+ val udp = setup(Map(entity -> testRecorder.collect(collectionContext)))
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
- data.utf8String should be(s"kamon.actor.processing-time:10|ms|#actor:user/kamon")
+ data.utf8String should be(s"kamon.category.metric-one:10|ms|#category:datadog")
}
"include the sampling rate in case of multiple measurements of the same value" in new UdpListenerFixture {
- val testMetricName = "processing-time"
- val testRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
- testRecorder.record(10L)
- testRecorder.record(10L)
+ val (entity, testRecorder) = buildRecorder("datadog")
+ testRecorder.metricTwo.record(10L, 2)
- val udp = setup(Map(testMetricName -> testRecorder.collect(collectionContext)))
+ val udp = setup(Map(entity -> testRecorder.collect(collectionContext)))
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
- data.utf8String should be(s"kamon.actor.processing-time:10|ms|@0.5|#actor:user/kamon")
+ data.utf8String should be(s"kamon.category.metric-two:10|ms|@0.5|#category:datadog")
}
"flush the packet when the max-packet-size is reached" in new UdpListenerFixture {
- val testMetricName = "processing-time"
- val testRecorder = Histogram(10000L, Precision.Normal, Scale.Unit)
+ val (entity, testRecorder) = buildRecorder("datadog")
var bytes = 0
var level = 0
while (bytes <= testMaxPacketSize) {
level += 1
- testRecorder.record(level)
- bytes += s"kamon.actor.$testMetricName:$level|ms|#actor:user/kamon".length
+ testRecorder.metricOne.record(level)
+ bytes += s"kamon.category.metric-one:$level|ms|#category:datadog".length
}
- val udp = setup(Map(testMetricName -> testRecorder.collect(collectionContext)))
+ val udp = setup(Map(entity -> testRecorder.collect(collectionContext)))
udp.expectMsgType[Udp.Send] // let the first flush pass
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
- data.utf8String should be(s"kamon.actor.$testMetricName:$level|ms|#actor:user/kamon")
+ data.utf8String should be(s"kamon.category.metric-one:$level|ms|#category:datadog")
}
"render multiple keys in the same packet using newline as separator" in new UdpListenerFixture {
- val firstTestMetricName = "processing-time-1"
- val secondTestMetricName = "processing-time-2"
- val thirdTestMetricName = "counter"
+ val (entity, testRecorder) = buildRecorder("datadog")
- val firstTestRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
- val secondTestRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
- val thirdTestRecorder = Counter()
+ testRecorder.metricOne.record(10L, 2)
+ testRecorder.metricTwo.record(21L)
+ testRecorder.counterOne.increment(4L)
- firstTestRecorder.record(10L)
- firstTestRecorder.record(10L)
-
- secondTestRecorder.record(21L)
-
- thirdTestRecorder.increment(4L)
-
- val udp = setup(Map(
- firstTestMetricName -> firstTestRecorder.collect(collectionContext),
- secondTestMetricName -> secondTestRecorder.collect(collectionContext),
- thirdTestMetricName -> thirdTestRecorder.collect(collectionContext)))
+ val udp = setup(Map(entity -> testRecorder.collect(collectionContext)))
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
- data.utf8String should be("kamon.actor.processing-time-1:10|ms|@0.5|#actor:user/kamon\nkamon.actor.processing-time-2:21|ms|#actor:user/kamon\nkamon.actor.counter:4|c|#actor:user/kamon")
+ data.utf8String should be("kamon.category.metric-one:10|ms|@0.5|#category:datadog\nkamon.category.counter:4|c|#category:datadog\nkamon.category.metric-two:21|ms|#category:datadog")
}
+
}
trait UdpListenerFixture {
val localhostName = ManagementFactory.getRuntimeMXBean.getName.split('@')(1)
val testMaxPacketSize = system.settings.config.getBytes("kamon.datadog.max-packet-size")
- def setup(metrics: Map[String, MetricSnapshot]): TestProbe = {
+ def buildRecorder(name: String): (Entity, TestEntityRecorder) = {
+ val registration = Kamon.metrics.register(TestEntityRecorder, name).get
+ (registration.entity, registration.recorder)
+ }
+
+ def setup(metrics: Map[Entity, EntitySnapshot]): TestProbe = {
val udp = TestProbe()
val metricsSender = system.actorOf(Props(new DatadogMetricsSender(new InetSocketAddress(localhostName, 0), testMaxPacketSize) {
override def udpExtension(implicit system: ActorSystem): ActorRef = udp.ref
@@ -130,31 +120,21 @@ class DatadogMetricSenderSpec extends TestKitBase with WordSpecLike with Matcher
udp.expectMsgType[Udp.SimpleSender]
udp.reply(Udp.SimpleSenderReady)
- // These names are not intented to match the real actor metrics, it's just about seeing more familiar data in tests.
- val testGroupIdentity = new MetricGroupIdentity {
- val name: String = "user/kamon"
- val category: MetricGroupCategory = new MetricGroupCategory {
- val name: String = "actor"
- }
- }
-
- val testMetrics = for ((metricName, snapshot) ← metrics) yield {
- val testMetricIdentity = new MetricIdentity {
- val name: String = metricName
- val tag: String = ""
- }
-
- (testMetricIdentity, snapshot)
- }
-
- metricsSender ! TickMetricSnapshot(0, 0, Map(testGroupIdentity -> new MetricGroupSnapshot {
- type GroupSnapshotType = Histogram.Snapshot
- def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType = ???
-
- val metrics: Map[MetricIdentity, MetricSnapshot] = testMetrics.toMap
- }))
+ val fakeSnapshot = TickMetricSnapshot(MilliTimestamp.now, MilliTimestamp.now, metrics)
+ metricsSender ! fakeSnapshot
udp
}
}
}
+
+class TestEntityRecorder(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val metricOne = histogram("metric-one")
+ val metricTwo = histogram("metric-two")
+ val counterOne = counter("counter")
+}
+
+object TestEntityRecorder extends EntityRecorderFactory[TestEntityRecorder] {
+ def category: String = "category"
+ def createRecorder(instrumentFactory: InstrumentFactory): TestEntityRecorder = new TestEntityRecorder(instrumentFactory)
+}
diff --git a/kamon-jdbc/src/main/resources/META-INF/aop.xml b/kamon-jdbc/src/main/resources/META-INF/aop.xml
new file mode 100644
index 00000000..50529603
--- /dev/null
+++ b/kamon-jdbc/src/main/resources/META-INF/aop.xml
@@ -0,0 +1,12 @@
+<!DOCTYPE aspectj PUBLIC "-//AspectJ//DTD//EN" "http://www.eclipse.org/aspectj/dtd/aspectj.dtd">
+
+<aspectj>
+ <aspects>
+ <aspect name="kamon.jdbc.instrumentation.StatementInstrumentation"/>
+ </aspects>
+
+ <weaver>
+ <include within="java.sql.Statement..*"/>
+ <include within="java.sql.Connection..*"/>
+ </weaver>
+</aspectj>
diff --git a/kamon-jdbc/src/main/resources/reference.conf b/kamon-jdbc/src/main/resources/reference.conf
new file mode 100644
index 00000000..e058c873
--- /dev/null
+++ b/kamon-jdbc/src/main/resources/reference.conf
@@ -0,0 +1,26 @@
+# ================================== #
+# Kamon-jdbc Reference Configuration #
+# ================================== #
+
+kamon {
+ jdbc {
+ slow-query-threshold = 2 seconds
+
+ # Fully qualified name of the implementation of kamon.jdbc.SlowQueryProcessor.
+ slow-query-processor = kamon.jdbc.DefaultSlowQueryProcessor
+
+ # Fully qualified name of the implementation of kamon.jdbc.SqlErrorProcessor.
+ sql-error-processor = kamon.jdbc.DefaultSqlErrorProcessor
+
+ # Fully qualified name of the implementation of kamon.jdbc.JdbcNameGenerator that will be used for assigning names to segments.
+ name-generator = kamon.jdbc.DefaultJdbcNameGenerator
+ }
+
+ modules {
+ kamon-jdbc {
+ auto-start = yes
+ requires-aspectj = yes
+ extension-id = "kamon.jdbc.Jdbc"
+ }
+ }
+}
\ No newline at end of file
diff --git a/kamon-jdbc/src/main/scala/kamon/jdbc/Jdbc.scala b/kamon-jdbc/src/main/scala/kamon/jdbc/Jdbc.scala
new file mode 100644
index 00000000..12de598d
--- /dev/null
+++ b/kamon-jdbc/src/main/scala/kamon/jdbc/Jdbc.scala
@@ -0,0 +1,82 @@
+/* =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.jdbc
+
+import akka.actor.{ ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider }
+import kamon.Kamon
+import kamon.util.ConfigTools.Syntax
+
+object Jdbc extends ExtensionId[JdbcExtension] with ExtensionIdProvider {
+ override def lookup(): ExtensionId[_ <: Extension] = Jdbc
+ override def createExtension(system: ExtendedActorSystem): JdbcExtension = new JdbcExtension(system)
+
+ val SegmentLibraryName = "jdbc"
+}
+
+class JdbcExtension(system: ExtendedActorSystem) extends Kamon.Extension {
+ private val config = system.settings.config.getConfig("kamon.jdbc")
+
+ private val nameGeneratorFQN = config.getString("name-generator")
+ private val nameGenerator: JdbcNameGenerator = system.dynamicAccess.createInstanceFor[JdbcNameGenerator](nameGeneratorFQN, Nil).get
+
+ private val slowQueryProcessorClass = config.getString("slow-query-processor")
+ private val slowQueryProcessor: SlowQueryProcessor = system.dynamicAccess.createInstanceFor[SlowQueryProcessor](slowQueryProcessorClass, Nil).get
+
+ private val sqlErrorProcessorClass = config.getString("sql-error-processor")
+ private val sqlErrorProcessor: SqlErrorProcessor = system.dynamicAccess.createInstanceFor[SqlErrorProcessor](sqlErrorProcessorClass, Nil).get
+
+ val slowQueryThreshold = config.getFiniteDuration("slow-query-threshold").toMillis
+
+ def processSlowQuery(sql: String, executionTime: Long) = slowQueryProcessor.process(sql, executionTime, slowQueryThreshold)
+ def processSqlError(sql: String, ex: Throwable) = sqlErrorProcessor.process(sql, ex)
+ def generateJdbcSegmentName(statement: String): String = nameGenerator.generateJdbcSegmentName(statement)
+}
+
+trait SlowQueryProcessor {
+ def process(sql: String, executionTime: Long, queryThreshold: Long): Unit
+}
+
+trait SqlErrorProcessor {
+ def process(sql: String, ex: Throwable): Unit
+}
+
+trait JdbcNameGenerator {
+ def generateJdbcSegmentName(statement: String): String
+}
+
+class DefaultJdbcNameGenerator extends JdbcNameGenerator {
+ def generateJdbcSegmentName(statement: String): String = s"Jdbc[$statement]"
+}
+
+class DefaultSqlErrorProcessor extends SqlErrorProcessor {
+ import org.slf4j.LoggerFactory
+
+ val log = LoggerFactory.getLogger(classOf[DefaultSqlErrorProcessor])
+
+ override def process(sql: String, cause: Throwable): Unit = {
+ log.error(s"the query [$sql] failed with exception [${cause.getMessage}]")
+ }
+}
+
+class DefaultSlowQueryProcessor extends SlowQueryProcessor {
+ import org.slf4j.LoggerFactory
+
+ val log = LoggerFactory.getLogger(classOf[DefaultSlowQueryProcessor])
+
+ override def process(sql: String, executionTimeInMillis: Long, queryThresholdInMillis: Long): Unit = {
+ log.warn(s"The query [$sql] took $executionTimeInMillis ms and the slow query threshold is $queryThresholdInMillis ms")
+ }
+}
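Because the extension resolves all three collaborators through system.dynamicAccess, replacing a default only requires a zero-argument class and a config override. A hedged sketch of a custom slow-query processor; the class name, package and log format are illustrative, not part of this commit:

    package myapp.monitoring

    import kamon.jdbc.SlowQueryProcessor
    import org.slf4j.LoggerFactory

    // Wire it in with: kamon.jdbc.slow-query-processor = myapp.monitoring.MySlowQueryProcessor
    class MySlowQueryProcessor extends SlowQueryProcessor {
      private val log = LoggerFactory.getLogger(classOf[MySlowQueryProcessor])

      override def process(sql: String, executionTimeInMillis: Long, queryThresholdInMillis: Long): Unit = {
        val overshootInMillis = executionTimeInMillis - queryThresholdInMillis
        log.warn(s"Slow query ($overshootInMillis ms over the $queryThresholdInMillis ms threshold): $sql")
      }
    }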
diff --git a/kamon-jdbc/src/main/scala/kamon/jdbc/instrumentation/StatementInstrumentation.scala b/kamon-jdbc/src/main/scala/kamon/jdbc/instrumentation/StatementInstrumentation.scala
new file mode 100644
index 00000000..d169a4c7
--- /dev/null
+++ b/kamon-jdbc/src/main/scala/kamon/jdbc/instrumentation/StatementInstrumentation.scala
@@ -0,0 +1,122 @@
+/* =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.jdbc.instrumentation
+
+import java.util.concurrent.TimeUnit.{ NANOSECONDS ⇒ nanos }
+
+import kamon.Kamon
+import kamon.jdbc.{ JdbcExtension, Jdbc }
+import kamon.jdbc.metric.StatementsMetrics
+import kamon.trace.{ TraceContext, SegmentCategory }
+import org.aspectj.lang.ProceedingJoinPoint
+import org.aspectj.lang.annotation.{ Around, Aspect, Pointcut }
+import org.slf4j.LoggerFactory
+
+import scala.util.control.NonFatal
+
+@Aspect
+class StatementInstrumentation {
+
+ import StatementInstrumentation._
+
+ @Pointcut("call(* java.sql.Statement.execute*(..)) && args(sql)")
+ def onExecuteStatement(sql: String): Unit = {}
+
+ @Pointcut("call(* java.sql.Connection.prepareStatement(..)) && args(sql)")
+ def onExecutePreparedStatement(sql: String): Unit = {}
+
+ @Pointcut("call(* java.sql.Connection.prepareCall(..)) && args(sql)")
+ def onExecutePreparedCall(sql: String): Unit = {}
+
+ @Around("onExecuteStatement(sql) || onExecutePreparedStatement(sql) || onExecutePreparedCall(sql)")
+ def aroundExecuteStatement(pjp: ProceedingJoinPoint, sql: String): Any = {
+ TraceContext.map { ctx ⇒
+ val metricsExtension = Kamon.metrics
+ val jdbcExtension = Kamon(Jdbc)
+ implicit val statementRecorder = metricsExtension.register(StatementsMetrics, "jdbc-statements").map(_.recorder)
+
+ sql.replaceAll(CommentPattern, Empty) match {
+ case SelectStatement(_) ⇒ withSegment(ctx, Select, jdbcExtension)(recordRead(pjp, sql, jdbcExtension))
+ case InsertStatement(_) ⇒ withSegment(ctx, Insert, jdbcExtension)(recordWrite(pjp, sql, jdbcExtension))
+ case UpdateStatement(_) ⇒ withSegment(ctx, Update, jdbcExtension)(recordWrite(pjp, sql, jdbcExtension))
+ case DeleteStatement(_) ⇒ withSegment(ctx, Delete, jdbcExtension)(recordWrite(pjp, sql, jdbcExtension))
+ case anythingElse ⇒
+ log.debug(s"Unable to parse sql [$sql]")
+ pjp.proceed()
+ }
+ }
+ } getOrElse pjp.proceed()
+
+ def withTimeSpent[A](thunk: ⇒ A)(timeSpent: Long ⇒ Unit): A = {
+ val start = System.nanoTime()
+ try thunk finally timeSpent(System.nanoTime() - start)
+ }
+
+ def withSegment[A](ctx: TraceContext, statement: String, jdbcExtension: JdbcExtension)(thunk: ⇒ A): A = {
+ val segmentName = jdbcExtension.generateJdbcSegmentName(statement)
+ val segment = ctx.startSegment(segmentName, SegmentCategory.Database, Jdbc.SegmentLibraryName)
+ try thunk finally segment.finish()
+ }
+
+ def recordRead(pjp: ProceedingJoinPoint, sql: String, jdbcExtension: JdbcExtension)(implicit statementRecorder: Option[StatementsMetrics]): Any = {
+ withTimeSpent(pjp.proceedWithErrorHandler(sql, jdbcExtension)) { timeSpent ⇒
+ statementRecorder.map(stmr ⇒ stmr.reads.record(timeSpent))
+
+ val timeSpentInMillis = nanos.toMillis(timeSpent)
+
+ if (timeSpentInMillis >= jdbcExtension.slowQueryThreshold) {
+ statementRecorder.map(stmr ⇒ stmr.slows.increment())
+ jdbcExtension.processSlowQuery(sql, timeSpentInMillis)
+ }
+ }
+ }
+
+ def recordWrite(pjp: ProceedingJoinPoint, sql: String, jdbcExtension: JdbcExtension)(implicit statementRecorder: Option[StatementsMetrics]): Any = {
+ withTimeSpent(pjp.proceedWithErrorHandler(sql, jdbcExtension)) { timeSpent ⇒
+ statementRecorder.map(stmr ⇒ stmr.writes.record(timeSpent))
+ }
+ }
+}
+
+object StatementInstrumentation {
+ val log = LoggerFactory.getLogger(classOf[StatementInstrumentation])
+
+ val SelectStatement = "(?i)^\\s*select.*?\\sfrom[\\s\\[]+([^\\]\\s,)(;]*).*".r
+ val InsertStatement = "(?i)^\\s*insert(?:\\s+ignore)?\\s+into\\s+([^\\s(,;]*).*".r
+ val UpdateStatement = "(?i)^\\s*update\\s+([^\\s,;]*).*".r
+ val DeleteStatement = "(?i)^\\s*delete\\s+from\\s+([^\\s,(;]*).*".r
+ val CommentPattern = "/\\*.*?\\*/" // for now, only strips block comments of the form /* anything */
+ val Empty = ""
+ val Statements = "jdbc-statements"
+ val Select = "Select"
+ val Insert = "Insert"
+ val Update = "Update"
+ val Delete = "Delete"
+
+ implicit class PimpedProceedingJoinPoint(pjp: ProceedingJoinPoint) {
+ def proceedWithErrorHandler(sql: String, jdbcExtension: JdbcExtension)(implicit statementRecorder: Option[StatementsMetrics]): Any = {
+ try {
+ pjp.proceed()
+ } catch {
+ case NonFatal(cause) ⇒
+ jdbcExtension.processSqlError(sql, cause)
+ statementRecorder.map(stmr ⇒ stmr.errors.increment())
+ throw cause
+ }
+ }
+ }
+}
+
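The dispatch in aroundExecuteStatement hinges on the regular expressions above: comments are stripped with CommentPattern, then the result is matched against the SELECT/INSERT/UPDATE/DELETE extractors, each of which also captures the target table name. A standalone sketch of that classification step, copying the patterns verbatim (the classify helper is illustrative):

    object SqlClassifierSketch extends App {
      val SelectStatement = "(?i)^\\s*select.*?\\sfrom[\\s\\[]+([^\\]\\s,)(;]*).*".r
      val InsertStatement = "(?i)^\\s*insert(?:\\s+ignore)?\\s+into\\s+([^\\s(,;]*).*".r
      val UpdateStatement = "(?i)^\\s*update\\s+([^\\s,;]*).*".r
      val DeleteStatement = "(?i)^\\s*delete\\s+from\\s+([^\\s,(;]*).*".r
      val CommentPattern = "/\\*.*?\\*/"

      def classify(sql: String): String = sql.replaceAll(CommentPattern, "") match {
        case SelectStatement(table) ⇒ s"Select on $table"
        case InsertStatement(table) ⇒ s"Insert on $table"
        case UpdateStatement(table) ⇒ s"Update on $table"
        case DeleteStatement(table) ⇒ s"Delete on $table"
        case anythingElse           ⇒ "unparsed (proceeds without a segment)"
      }

      println(classify("/* lookup */ SELECT * FROM Address WHERE Nr = 1")) // Select on Address
      println(classify("INSERT INTO Address (Nr, Name) VALUES(1, 'foo')")) // Insert on Address
      println(classify("CALL SLEEP(100)"))                                 // unparsed (proceeds without a segment)
    }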
diff --git a/kamon-jdbc/src/main/scala/kamon/jdbc/metric/StatementsMetrics.scala b/kamon-jdbc/src/main/scala/kamon/jdbc/metric/StatementsMetrics.scala
new file mode 100644
index 00000000..e1d6689c
--- /dev/null
+++ b/kamon-jdbc/src/main/scala/kamon/jdbc/metric/StatementsMetrics.scala
@@ -0,0 +1,32 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.jdbc.metric
+
+import kamon.metric._
+import kamon.metric.instrument.{ Time, InstrumentFactory }
+
+class StatementsMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val reads = histogram("reads", Time.Nanoseconds)
+ val writes = histogram("writes", Time.Nanoseconds)
+ val slows = counter("slows")
+ val errors = counter("errors")
+}
+
+object StatementsMetrics extends EntityRecorderFactory[StatementsMetrics] {
+ def category: String = "jdbc-statements"
+ def createRecorder(instrumentFactory: InstrumentFactory): StatementsMetrics = new StatementsMetrics(instrumentFactory)
+}
\ No newline at end of file
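For orientation, this is the recorder that StatementInstrumentation registers under the jdbc-statements category. A hedged sketch of the register-and-record cycle it participates in, mirroring what the aspect does (assumes a started Kamon instance of this era's API):

    import kamon.Kamon
    import kamon.jdbc.metric.StatementsMetrics

    // register(...) returns an Option of the registration; the aspect keeps
    // only the recorder and feeds nanosecond timings into it.
    val statementsRecorder = Kamon.metrics.register(StatementsMetrics, "jdbc-statements").map(_.recorder)
    statementsRecorder.foreach { recorder ⇒
      recorder.reads.record(1200000L) // nanoseconds, per Time.Nanoseconds above
      recorder.slows.increment()
    }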
diff --git a/kamon-jdbc/src/test/resources/logback.xml b/kamon-jdbc/src/test/resources/logback.xml
new file mode 100644
index 00000000..c336bbfe
--- /dev/null
+++ b/kamon-jdbc/src/test/resources/logback.xml
@@ -0,0 +1,12 @@
+<configuration>
+ <statusListener class="ch.qos.logback.core.status.NopStatusListener"/>
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <encoder>
+ <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
+ </encoder>
+ </appender>
+
+ <root level="OFF">
+ <appender-ref ref="STDOUT"/>
+ </root>
+</configuration>
\ No newline at end of file
diff --git a/kamon-jdbc/src/test/scala/kamon/jdbc/instrumentation/StatementInstrumentationSpec.scala b/kamon-jdbc/src/test/scala/kamon/jdbc/instrumentation/StatementInstrumentationSpec.scala
new file mode 100644
index 00000000..e150d967
--- /dev/null
+++ b/kamon-jdbc/src/test/scala/kamon/jdbc/instrumentation/StatementInstrumentationSpec.scala
@@ -0,0 +1,189 @@
+/* =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.jdbc.instrumentation
+
+import java.sql.{ DriverManager, SQLException }
+
+import com.typesafe.config.ConfigFactory
+import kamon.jdbc.{ Jdbc, JdbcNameGenerator, SqlErrorProcessor, SlowQueryProcessor }
+import kamon.metric.TraceMetricsSpec
+import kamon.testkit.BaseKamonSpec
+import kamon.trace.{ SegmentCategory, TraceContext }
+
+class StatementInstrumentationSpec extends BaseKamonSpec("jdbc-spec") {
+ import TraceMetricsSpec.SegmentSyntax
+
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon {
+ | jdbc {
+ | slow-query-threshold = 100 milliseconds
+ |
+ | # Fully qualified name of the implementation of kamon.jdbc.SlowQueryProcessor.
+ | slow-query-processor = kamon.jdbc.instrumentation.NoOpSlowQueryProcessor
+ |
+ | # Fully qualified name of the implementation of kamon.jdbc.SqlErrorProcessor.
+ | sql-error-processor = kamon.jdbc.instrumentation.NoOpSqlErrorProcessor
+ |
+ | # Fully qualified name of the implementation of kamon.jdbc.JdbcNameGenerator
+ | name-generator = kamon.jdbc.instrumentation.NoOpJdbcNameGenerator
+ | }
+ |}
+ """.stripMargin)
+
+ val connection = DriverManager.getConnection("jdbc:h2:mem:jdbc-spec", "SA", "")
+
+ override protected def beforeAll(): Unit = {
+ connection should not be null
+
+ val create = "CREATE TABLE Address (Nr INTEGER, Name VARCHAR(128));"
+ val createStatement = connection.createStatement()
+ createStatement.executeUpdate(create)
+
+ val sleep = "CREATE ALIAS SLEEP FOR \"java.lang.Thread.sleep(long)\""
+ val sleepStatement = connection.createStatement()
+ sleepStatement.executeUpdate(sleep)
+ }
+
+ "the StatementInstrumentation" should {
+ "record the execution time of INSERT operation" in {
+ TraceContext.withContext(newContext("jdbc-trace-insert")) {
+ for (id ← 1 to 100) {
+ val insert = s"INSERT INTO Address (Nr, Name) VALUES($id, 'foo')"
+ val insertStatement = connection.prepareStatement(insert)
+ insertStatement.execute()
+ }
+
+ TraceContext.currentContext.finish()
+ }
+
+ val jdbcSnapshot = takeSnapshotOf("jdbc-statements", "jdbc-statements")
+ jdbcSnapshot.histogram("writes").get.numberOfMeasurements should be(100)
+
+ val traceSnapshot = takeSnapshotOf("jdbc-trace-insert", "trace")
+ traceSnapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
+ traceSnapshot.segments.size should be(1)
+ traceSnapshot.segment("Jdbc[Insert]", SegmentCategory.Database, Jdbc.SegmentLibraryName).numberOfMeasurements should be(100)
+ }
+
+ "record the execution time of SELECT operation" in {
+ TraceContext.withContext(newContext("jdbc-trace-select")) {
+ for (id ← 1 to 100) {
+ val select = s"SELECT * FROM Address where Nr = $id"
+ val selectStatement = connection.createStatement()
+ selectStatement.execute(select)
+ }
+
+ TraceContext.currentContext.finish()
+ }
+
+ val jdbcSnapshot = takeSnapshotOf("jdbc-statements", "jdbc-statements")
+ jdbcSnapshot.histogram("reads").get.numberOfMeasurements should be(100)
+
+ val traceSnapshot = takeSnapshotOf("jdbc-trace-select", "trace")
+ traceSnapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
+ traceSnapshot.segments.size should be(1)
+ traceSnapshot.segment("Jdbc[Select]", SegmentCategory.Database, Jdbc.SegmentLibraryName).numberOfMeasurements should be(100)
+ }
+
+ "record the execution time of UPDATE operation" in {
+ TraceContext.withContext(newContext("jdbc-trace-update")) {
+ for (id ← 1 to 100) {
+ val update = s"UPDATE Address SET Name = 'bar$id' where Nr = $id"
+ val updateStatement = connection.prepareStatement(update)
+ updateStatement.execute()
+ }
+
+ TraceContext.currentContext.finish()
+ }
+
+ val jdbcSnapshot = takeSnapshotOf("jdbc-statements", "jdbc-statements")
+ jdbcSnapshot.histogram("writes").get.numberOfMeasurements should be(100)
+
+ val traceSnapshot = takeSnapshotOf("jdbc-trace-update", "trace")
+ traceSnapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
+ traceSnapshot.segments.size should be(1)
+ traceSnapshot.segment("Jdbc[Update]", SegmentCategory.Database, Jdbc.SegmentLibraryName).numberOfMeasurements should be(100)
+ }
+
+ "record the execution time of DELETE operation" in {
+ TraceContext.withContext(newContext("jdbc-trace-delete")) {
+ for (id ← 1 to 100) {
+ val delete = s"DELETE FROM Address where Nr = $id"
+ val deleteStatement = connection.createStatement()
+ deleteStatement.execute(delete)
+ }
+
+ TraceContext.currentContext.finish()
+ }
+
+ val jdbcSnapshot = takeSnapshotOf("jdbc-statements", "jdbc-statements")
+ jdbcSnapshot.histogram("writes").get.numberOfMeasurements should be(100)
+
+ val traceSnapshot = takeSnapshotOf("jdbc-trace-delete", "trace")
+ traceSnapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
+ traceSnapshot.segments.size should be(1)
+ traceSnapshot.segment("Jdbc[Delete]", SegmentCategory.Database, Jdbc.SegmentLibraryName).numberOfMeasurements should be(100)
+
+ }
+
+ "record the execution time of SLOW QUERIES based on the kamon.jdbc.slow-query-threshold" in {
+ TraceContext.withContext(newContext("jdbc-trace-slow")) {
+ for (id ← 1 to 2) {
+ val select = s"SELECT * FROM Address; CALL SLEEP(100)"
+ val selectStatement = connection.createStatement()
+ selectStatement.execute(select)
+ }
+
+ TraceContext.currentContext.finish()
+ }
+
+ val jdbcSnapshot = takeSnapshotOf("jdbc-statements", "jdbc-statements")
+ jdbcSnapshot.counter("slows").get.count should be(2)
+
+ }
+
+ "count all SQL ERRORS" in {
+ TraceContext.withContext(newContext("jdbc-trace-errors")) {
+ for (_ ← 1 to 10) {
+ intercept[SQLException] {
+ val error = "SELECT * FROM NO_EXISTENT_TABLE"
+ val errorStatement = connection.createStatement()
+ errorStatement.execute(error)
+ }
+ }
+
+ TraceContext.currentContext.finish()
+ }
+
+ val jdbcSnapshot = takeSnapshotOf("jdbc-statements", "jdbc-statements")
+ jdbcSnapshot.counter("errors").get.count should be(10)
+ }
+ }
+}
+
+class NoOpSlowQueryProcessor extends SlowQueryProcessor {
+ override def process(sql: String, executionTimeInMillis: Long, queryThresholdInMillis: Long): Unit = { /*do nothing!!!*/ }
+}
+
+class NoOpSqlErrorProcessor extends SqlErrorProcessor {
+ override def process(sql: String, ex: Throwable): Unit = { /*do nothing!!!*/ }
+}
+
+class NoOpJdbcNameGenerator extends JdbcNameGenerator {
+ override def generateJdbcSegmentName(statement: String): String = s"Jdbc[$statement]"
+}
\ No newline at end of file
diff --git a/kamon-log-reporter/src/main/resources/reference.conf b/kamon-log-reporter/src/main/resources/reference.conf
index dea218eb..080d8a76 100644
--- a/kamon-log-reporter/src/main/resources/reference.conf
+++ b/kamon-log-reporter/src/main/resources/reference.conf
@@ -4,9 +4,15 @@
kamon {
log-reporter {
-
- # Enable system metrics
- # In order to not get a ClassNotFoundException, we must register the kamon-sytem-metrics module
+ # Decide whether the log reporter should log system metrics, if available.
report-system-metrics = false
}
+
+ modules {
+ kamon-log-reporter {
+ auto-start = yes
+ requires-aspectj = no
+ extension-id = "kamon.logreporter.LogReporter"
+ }
+ }
}
diff --git a/kamon-log-reporter/src/main/scala/kamon/logreporter/LogReporter.scala b/kamon-log-reporter/src/main/scala/kamon/logreporter/LogReporter.scala
index fd76f50c..d5b07f6d 100644
--- a/kamon-log-reporter/src/main/scala/kamon/logreporter/LogReporter.scala
+++ b/kamon-log-reporter/src/main/scala/kamon/logreporter/LogReporter.scala
@@ -19,27 +19,13 @@ package kamon.logreporter
import akka.actor._
import akka.event.Logging
import kamon.Kamon
-import kamon.metric.ActorMetrics.ActorMetricSnapshot
-import kamon.metric.Subscriptions.TickMetricSnapshot
-import kamon.metric.TraceMetrics.TraceMetricsSnapshot
-import kamon.metric.UserMetrics._
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
import kamon.metric._
import kamon.metric.instrument.{ Counter, Histogram }
-import kamon.metrics.ContextSwitchesMetrics.ContextSwitchesMetricsSnapshot
-import kamon.metrics.NetworkMetrics.NetworkMetricSnapshot
-import kamon.metrics.ProcessCPUMetrics.ProcessCPUMetricsSnapshot
-import kamon.metrics._
-import kamon.metrics.CPUMetrics.CPUMetricSnapshot
object LogReporter extends ExtensionId[LogReporterExtension] with ExtensionIdProvider {
override def lookup(): ExtensionId[_ <: Extension] = LogReporter
override def createExtension(system: ExtendedActorSystem): LogReporterExtension = new LogReporterExtension(system)
-
- trait MetricKeyGenerator {
- def localhostName: String
- def normalizedLocalhostName: String
- def generateKey(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity): String
- }
}
class LogReporterExtension(system: ExtendedActorSystem) extends Kamon.Extension {
@@ -47,25 +33,15 @@ class LogReporterExtension(system: ExtendedActorSystem) extends Kamon.Extension
log.info("Starting the Kamon(LogReporter) extension")
val logReporterConfig = system.settings.config.getConfig("kamon.log-reporter")
-
val subscriber = system.actorOf(Props[LogReporterSubscriber], "kamon-log-reporter")
- Kamon(Metrics)(system).subscribe(TraceMetrics, "*", subscriber, permanently = true)
- Kamon(Metrics)(system).subscribe(ActorMetrics, "*", subscriber, permanently = true)
- // Subscribe to all user metrics
- Kamon(Metrics)(system).subscribe(UserHistograms, "*", subscriber, permanently = true)
- Kamon(Metrics)(system).subscribe(UserCounters, "*", subscriber, permanently = true)
- Kamon(Metrics)(system).subscribe(UserMinMaxCounters, "*", subscriber, permanently = true)
- Kamon(Metrics)(system).subscribe(UserGauges, "*", subscriber, permanently = true)
+ Kamon.metrics.subscribe("trace", "**", subscriber, permanently = true)
+ Kamon.metrics.subscribe("actor", "**", subscriber, permanently = true)
+ Kamon.metrics.subscribe("user-metrics", "**", subscriber, permanently = true)
val includeSystemMetrics = logReporterConfig.getBoolean("report-system-metrics")
-
if (includeSystemMetrics) {
- // Subscribe to SystemMetrics
- Kamon(Metrics)(system).subscribe(CPUMetrics, "*", subscriber, permanently = true)
- Kamon(Metrics)(system).subscribe(ProcessCPUMetrics, "*", subscriber, permanently = true)
- Kamon(Metrics)(system).subscribe(NetworkMetrics, "*", subscriber, permanently = true)
- Kamon(Metrics)(system).subscribe(ContextSwitchesMetrics, "*", subscriber, permanently = true)
+ Kamon.metrics.subscribe("system-metric", "**", subscriber, permanently = true)
}
}
@@ -78,32 +54,25 @@ class LogReporterSubscriber extends Actor with ActorLogging {
}
def printMetricSnapshot(tick: TickMetricSnapshot): Unit = {
- // Group all the user metrics together.
- val histograms = Map.newBuilder[MetricGroupIdentity, Histogram.Snapshot]
- val counters = Map.newBuilder[MetricGroupIdentity, Counter.Snapshot]
- val minMaxCounters = Map.newBuilder[MetricGroupIdentity, Histogram.Snapshot]
- val gauges = Map.newBuilder[MetricGroupIdentity, Histogram.Snapshot]
-
tick.metrics foreach {
- case (identity, ams: ActorMetricSnapshot) ⇒ logActorMetrics(identity.name, ams)
- case (identity, tms: TraceMetricsSnapshot) ⇒ logTraceMetrics(identity.name, tms)
- case (h: UserHistogram, s: UserHistogramSnapshot) ⇒ histograms += (h -> s.histogramSnapshot)
- case (c: UserCounter, s: UserCounterSnapshot) ⇒ counters += (c -> s.counterSnapshot)
- case (m: UserMinMaxCounter, s: UserMinMaxCounterSnapshot) ⇒ minMaxCounters += (m -> s.minMaxCounterSnapshot)
- case (g: UserGauge, s: UserGaugeSnapshot) ⇒ gauges += (g -> s.gaugeSnapshot)
- case (_, cms: CPUMetricSnapshot) ⇒ logCpuMetrics(cms)
- case (_, pcms: ProcessCPUMetricsSnapshot) ⇒ logProcessCpuMetrics(pcms)
- case (_, nms: NetworkMetricSnapshot) ⇒ logNetworkMetrics(nms)
- case (_, csms: ContextSwitchesMetricsSnapshot) ⇒ logContextSwitchesMetrics(csms)
- case ignoreEverythingElse ⇒
+ case (entity, snapshot) if entity.category == "actor" ⇒ logActorMetrics(entity.name, snapshot)
+ case (entity, snapshot) if entity.category == "trace" ⇒ logTraceMetrics(entity.name, snapshot)
+ case (entity, snapshot) if entity.category == "user-metric" ⇒ logUserMetrics(snapshot)
+ case (entity, snapshot) if entity.category == "system-metric" ⇒ logSystemMetrics(entity.name, snapshot)
+ case ignoreEverythingElse ⇒
}
-
- logUserMetrics(histograms.result(), counters.result(), minMaxCounters.result(), gauges.result())
}
- def logActorMetrics(name: String, ams: ActorMetricSnapshot): Unit = {
- log.info(
- """
+ def logActorMetrics(name: String, actorSnapshot: EntitySnapshot): Unit = {
+ for {
+ processingTime ← actorSnapshot.histogram("processing-time")
+ timeInMailbox ← actorSnapshot.histogram("time-in-mailbox")
+ mailboxSize ← actorSnapshot.minMaxCounter("mailbox-size")
+ errors ← actorSnapshot.counter("errors")
+ } {
+
+ log.info(
+ """
|+--------------------------------------------------------------------------------------------------+
|| |
|| Actor: %-83s |
@@ -119,46 +88,67 @@ class LogReporterSubscriber extends Actor with ActorLogging {
|| Max: %-12s Max: %-12s |
|| |
|+--------------------------------------------------------------------------------------------------+"""
- .stripMargin.format(
- name,
- ams.processingTime.numberOfMeasurements, ams.timeInMailbox.numberOfMeasurements, ams.mailboxSize.min,
- ams.processingTime.min, ams.timeInMailbox.min, ams.mailboxSize.average,
- ams.processingTime.percentile(50.0D), ams.timeInMailbox.percentile(50.0D), ams.mailboxSize.max,
- ams.processingTime.percentile(90.0D), ams.timeInMailbox.percentile(90.0D),
- ams.processingTime.percentile(95.0D), ams.timeInMailbox.percentile(95.0D),
- ams.processingTime.percentile(99.0D), ams.timeInMailbox.percentile(99.0D), ams.errors.count,
- ams.processingTime.percentile(99.9D), ams.timeInMailbox.percentile(99.9D),
- ams.processingTime.max, ams.timeInMailbox.max))
+ .stripMargin.format(
+ name,
+ processingTime.numberOfMeasurements, timeInMailbox.numberOfMeasurements, mailboxSize.min,
+ processingTime.min, timeInMailbox.min, mailboxSize.average,
+ processingTime.percentile(50.0D), timeInMailbox.percentile(50.0D), mailboxSize.max,
+ processingTime.percentile(90.0D), timeInMailbox.percentile(90.0D),
+ processingTime.percentile(95.0D), timeInMailbox.percentile(95.0D),
+ processingTime.percentile(99.0D), timeInMailbox.percentile(99.0D), errors.count,
+ processingTime.percentile(99.9D), timeInMailbox.percentile(99.9D),
+ processingTime.max, timeInMailbox.max))
+ }
+
}
- def logCpuMetrics(cms: CPUMetricSnapshot): Unit = {
- import cms._
+ def logSystemMetrics(metric: String, snapshot: EntitySnapshot): Unit = metric match {
+ case "cpu" ⇒ logCpuMetrics(snapshot)
+ case "network" ⇒ logNetworkMetrics(snapshot)
+ case "process-cpu" ⇒ logProcessCpuMetrics(snapshot)
+ case "context-switches" ⇒ logContextSwitchesMetrics(snapshot)
+ case ignoreOthers ⇒
+ }
- log.info(
- """
+ def logCpuMetrics(cpuMetrics: EntitySnapshot): Unit = {
+ for {
+ user ← cpuMetrics.histogram("cpu-user")
+ system ← cpuMetrics.histogram("cpu-system")
+ cpuWait ← cpuMetrics.histogram("cpu-wait")
+ idle ← cpuMetrics.histogram("cpu-idle")
+ } {
+
+ log.info(
+ """
|+--------------------------------------------------------------------------------------------------+
|| |
|| CPU (ALL) |
|| |
|| User (percentage) System (percentage) Wait (percentage) Idle (percentage) |
|| Min: %-3s Min: %-3s Min: %-3s Min: %-3s |
- || Avg: %-3s Avg: %-3s Avg: %-3s Avg: %-3s |
+ || Avg: %-3s Avg: %-3s Avg: %-3s Avg: %-3s |
|| Max: %-3s Max: %-3s Max: %-3s Max: %-3s |
|| |
|| |
|+--------------------------------------------------------------------------------------------------+"""
- .stripMargin.format(
- user.min, system.min, cpuWait.min, idle.min,
- user.average, system.average, cpuWait.average, idle.average,
- user.max, system.max, cpuWait.max, idle.max))
+ .stripMargin.format(
+ user.min, system.min, cpuWait.min, idle.min,
+ user.average, system.average, cpuWait.average, idle.average,
+ user.max, system.max, cpuWait.max, idle.max))
+ }
}
- def logNetworkMetrics(nms: NetworkMetricSnapshot): Unit = {
- import nms._
+ def logNetworkMetrics(networkMetrics: EntitySnapshot): Unit = {
+ for {
+ rxBytes ← networkMetrics.histogram("rx-bytes")
+ txBytes ← networkMetrics.histogram("tx-bytes")
+ rxErrors ← networkMetrics.histogram("rx-errors")
+ txErrors ← networkMetrics.histogram("tx-errors")
+ } {
- log.info(
- """
+ log.info(
+ """
|+--------------------------------------------------------------------------------------------------+
|| |
|| Network (ALL) |
@@ -169,38 +159,50 @@ class LogReporterSubscriber extends Actor with ActorLogging {
|| Max: %-4s Max: %-4s |
|| |
|+--------------------------------------------------------------------------------------------------+"""
- .stripMargin.format(
- rxBytes.min, txBytes.min, rxErrors.sum, txErrors.sum,
- rxBytes.average, txBytes.average,
- rxBytes.max, txBytes.max))
+ .stripMargin.
+ format(
+ rxBytes.min, txBytes.min, rxErrors.sum, txErrors.sum,
+ rxBytes.average, txBytes.average,
+ rxBytes.max, txBytes.max))
+ }
}
- def logProcessCpuMetrics(pcms: ProcessCPUMetricsSnapshot): Unit = {
- import pcms._
+ def logProcessCpuMetrics(processCpuMetrics: EntitySnapshot): Unit = {
+ for {
+ user ← processCpuMetrics.histogram("process-user-cpu")
+ total ← processCpuMetrics.histogram("process-cpu")
+ } {
- log.info(
- """
+ log.info(
+ """
|+--------------------------------------------------------------------------------------------------+
|| |
|| Process-CPU |
|| |
- || Cpu-Percentage Total-Process-Time |
+ || User-Percentage Total-Percentage |
|| Min: %-12s Min: %-12s |
|| Avg: %-12s Avg: %-12s |
|| Max: %-12s Max: %-12s |
|| |
|+--------------------------------------------------------------------------------------------------+"""
- .stripMargin.format(
- (cpuPercent.min / 100), totalProcessTime.min,
- (cpuPercent.average / 100), totalProcessTime.average,
- (cpuPercent.max / 100), totalProcessTime.max))
+ .stripMargin.
+ format(
+ user.min, total.min,
+ user.average, total.average,
+ user.max, total.max))
+ }
+
}
- def logContextSwitchesMetrics(csms: ContextSwitchesMetricsSnapshot): Unit = {
- import csms._
+ def logContextSwitchesMetrics(contextSwitchMetrics: EntitySnapshot): Unit = {
+ for {
+ perProcessVoluntary ← contextSwitchMetrics.histogram("context-switches-process-voluntary")
+ perProcessNonVoluntary ← contextSwitchMetrics.histogram("context-switches-process-non-voluntary")
+ global ← contextSwitchMetrics.histogram("context-switches-global")
+ } {
- log.info(
- """
+ log.info(
+ """
|+--------------------------------------------------------------------------------------------------+
|| |
|| Context-Switches |
@@ -211,18 +213,24 @@ class LogReporterSubscriber extends Actor with ActorLogging {
|| Max: %-12s Max: %-12s Max: %-12s |
|| |
|+--------------------------------------------------------------------------------------------------+"""
- .stripMargin.format(
- global.min, perProcessNonVoluntary.min, perProcessVoluntary.min,
- global.average, perProcessNonVoluntary.average, perProcessVoluntary.average,
- global.max, perProcessNonVoluntary.max, perProcessVoluntary.max))
+ .stripMargin.
+ format(
+ global.min, perProcessNonVoluntary.min, perProcessVoluntary.min,
+ global.average, perProcessNonVoluntary.average, perProcessVoluntary.average,
+ global.max, perProcessNonVoluntary.max, perProcessVoluntary.max))
+ }
}
- def logTraceMetrics(name: String, tms: TraceMetricsSnapshot): Unit = {
+ def logTraceMetrics(name: String, traceSnapshot: EntitySnapshot): Unit = {
val traceMetricsData = StringBuilder.newBuilder
- traceMetricsData.append(
- """
+ for {
+ elapsedTime ← traceSnapshot.histogram("elapsed-time")
+ } {
+
+ traceMetricsData.append(
+ """
|+--------------------------------------------------------------------------------------------------+
|| |
|| Trace: %-83s |
@@ -230,22 +238,26 @@ class LogReporterSubscriber extends Actor with ActorLogging {
|| |
|| Elapsed Time (nanoseconds): |
|"""
- .stripMargin.format(
- name, tms.elapsedTime.numberOfMeasurements))
-
- traceMetricsData.append(compactHistogramView(tms.elapsedTime))
- traceMetricsData.append(
- """
- || |
- |+--------------------------------------------------------------------------------------------------+"""
- .stripMargin)
-
- log.info(traceMetricsData.toString())
+ .stripMargin.format(
+ name, elapsedTime.numberOfMeasurements))
+
+ traceMetricsData.append(compactHistogramView(elapsedTime))
+ traceMetricsData.append(
+ """
+ || |
+ |+--------------------------------------------------------------------------------------------------+"""
+ .stripMargin)
+
+ log.info(traceMetricsData.toString())
+ }
}
- def logUserMetrics(histograms: Map[MetricGroupIdentity, Histogram.Snapshot],
- counters: Map[MetricGroupIdentity, Counter.Snapshot], minMaxCounters: Map[MetricGroupIdentity, Histogram.Snapshot],
- gauges: Map[MetricGroupIdentity, Histogram.Snapshot]): Unit = {
+ def logUserMetrics(userMetrics: EntitySnapshot): Unit = {
+ val histograms = userMetrics.histograms
+ val minMaxCounters = userMetrics.minMaxCounters
+ val gauges = userMetrics.gauges
+ val counters = userMetrics.counters
if (histograms.isEmpty && counters.isEmpty && minMaxCounters.isEmpty && gauges.isEmpty) {
log.info("No user metrics reported")
diff --git a/kamon-newrelic/src/main/resources/reference.conf b/kamon-newrelic/src/main/resources/reference.conf
index c86e64ae..9dc793e1 100644
--- a/kamon-newrelic/src/main/resources/reference.conf
+++ b/kamon-newrelic/src/main/resources/reference.conf
@@ -14,12 +14,23 @@ kamon {
# Your New Relic license key.
license-key = e7d350b14228f3d28f35bc3140df2c3e565ea5d5
+ # Time to wait for a response when calling any of the New Relic collector API methods.
+ operation-timeout = 30 seconds
+
# attempts to send pending metrics in the next tick,
# combining the current metrics plus the pending, after max-retry, deletes all pending metrics
- max-initialize-retries = 3
+ max-connect-retries = 3
# delay between connection attempts to NewRelic collector
- initialize-retry-delay = 30 seconds
+ connect-retry-delay = 30 seconds
+ }
+
+ modules {
+ kamon-newrelic {
+ auto-start = yes
+ requires-aspectj = no
+ extension-id = "kamon.newrelic.NewRelic"
+ }
}
}
diff --git a/kamon-newrelic/src/main/scala/kamon/newrelic/Agent.scala b/kamon-newrelic/src/main/scala/kamon/newrelic/Agent.scala
index 6244c0ad..5f6383f8 100644
--- a/kamon-newrelic/src/main/scala/kamon/newrelic/Agent.scala
+++ b/kamon-newrelic/src/main/scala/kamon/newrelic/Agent.scala
@@ -18,113 +18,129 @@ package kamon.newrelic
import java.util.concurrent.TimeUnit.{ MILLISECONDS ⇒ milliseconds }
-import akka.actor.{ ActorSystem, ActorLogging, Actor }
-import akka.event.LoggingAdapter
+import akka.actor.{ ActorLogging, Actor }
import akka.io.IO
import akka.util.Timeout
-import kamon.Kamon
-import kamon.metric.{ CollectionContext, Metrics }
+import com.typesafe.config.Config
import spray.can.Http
import spray.json._
-import scala.concurrent.{ ExecutionContext, Future }
-import spray.httpx.{ SprayJsonSupport, ResponseTransformation }
-import spray.http._
+import scala.concurrent.Future
+import spray.httpx.SprayJsonSupport
import spray.json.lenses.JsonLenses._
import java.lang.management.ManagementFactory
-import spray.http.Uri.Query
import scala.concurrent.duration._
import Agent._
-
+import JsonProtocol._
import akka.pattern.pipe
-// TODO: Setup a proper host connector with custom timeout configuration for use with this.
-class Agent extends Actor with ClientPipelines with ResponseTransformation with SprayJsonSupport with ActorLogging {
- import JsonProtocol._
+class Agent extends Actor with SprayJsonSupport with ActorLogging {
import context.dispatcher
- implicit val operationTimeout = Timeout(30 seconds)
- val collectorClient = compressedToJsonPipeline(IO(Http)(context.system))
- val settings = buildAgentSettings(context.system)
- val baseQuery = Query(
- "license_key" -> settings.licenseKey,
- "marshal_format" -> "json",
- "protocol_version" -> "12")
+ val agentSettings = AgentSettings.fromConfig(context.system.settings.config)
+
+ // Start the reporters
+ context.actorOf(MetricReporter.props(agentSettings), "metric-reporter")
// Start the connection to the New Relic collector.
- self ! Initialize
+ self ! Connect
+
+ def receive: Receive = disconnected(agentSettings.maxConnectionRetries)
- def receive: Receive = uninitialized(settings.maxRetries)
+ def disconnected(attemptsLeft: Int): Receive = {
+ case Connect ⇒ pipe(connectToCollector) to self
+ case Connected(collector, runID) ⇒ configureChildren(collector, runID)
+ case ConnectFailed(reason) if (attemptsLeft > 0) ⇒ scheduleReconnection(reason, attemptsLeft)
+ case ConnectFailed(reason) ⇒ giveUpConnection()
+ }
- def uninitialized(attemptsLeft: Int): Receive = {
- case Initialize ⇒ pipe(connectToCollector) to self
- case Initialized(runID, collector) ⇒
- log.info("Agent initialized with runID: [{}] and collector: [{}]", runID, collector)
+ def connected: Receive = {
+ case Reconnect ⇒ reconnect()
+ case Shutdown ⇒ shutdown()
+ }
- val baseCollectorUri = Uri(s"http://$collector/agent_listener/invoke_raw_method").withQuery(baseQuery)
- context.actorOf(MetricReporter.props(settings, runID, baseCollectorUri), "metric-reporter")
+ def reconnect(): Unit = {
+ log.warning("New Relic request the agent to restart the connection, all reporters will be paused until a new connection is available.")
+ self ! Connect
+ context.children.foreach(_ ! ResetConfiguration)
+ context become disconnected(agentSettings.maxConnectionRetries)
+ }
- case InitializationFailed(reason) if (attemptsLeft > 0) ⇒
- log.error(reason, "Initialization failed, retrying in {} seconds", settings.retryDelay.toSeconds)
- context.system.scheduler.scheduleOnce(settings.retryDelay, self, Initialize)
- context become (uninitialized(attemptsLeft - 1))
+ def shutdown(): Unit = {
+ log.error("New Relic requested the agent to be stopped, no metrics will be reported after this point.")
+ context stop self
+ }
+
+ def configureChildren(collector: String, runID: Long): Unit = {
+ log.info("Configuring New Relic reporters to use runID: [{}] and collector: [{}]", runID, collector)
+ context.children.foreach(_ ! Configure(collector, runID))
+ context become connected
+ }
+
+ def scheduleReconnection(connectionFailureReason: Throwable, attemptsLeft: Int): Unit = {
+ log.error(connectionFailureReason, "Connection to the New Relic collector failed, retrying in {} seconds", agentSettings.retryDelay.toSeconds)
+ context.system.scheduler.scheduleOnce(agentSettings.retryDelay, self, Connect)
+ context become (disconnected(attemptsLeft - 1))
+ }
- case InitializationFailed(reason) ⇒
- log.error(reason, "Giving up while trying to set up a connection with the New Relic collector.")
- context.stop(self)
+ def giveUpConnection(): Unit = {
+ log.error("Giving up while trying to set up a connection with the New Relic collector. The New Relic module is shutting down itself.")
+ context.stop(self)
}
- def connectToCollector: Future[InitResult] = {
+ def connectToCollector: Future[ConnectResult] = {
(for {
collector ← selectCollector
- runId ← connect(collector, settings)
- } yield Initialized(runId, collector)) recover { case error ⇒ InitializationFailed(error) }
+ runID ← connect(collector, agentSettings)
+ } yield Connected(collector, runID)) recover { case error ⇒ ConnectFailed(error) }
}
def selectCollector: Future[String] = {
- val query = ("method" -> "get_redirect_host") +: baseQuery
- val getRedirectHostUri = Uri("http://collector.newrelic.com/agent_listener/invoke_raw_method").withQuery(query)
-
- collectorClient {
- Post(getRedirectHostUri, JsArray())
-
- } map { json ⇒
+ val apiClient = new ApiMethodClient("collector.newrelic.com", None, agentSettings, IO(Http)(context.system))
+ apiClient.invokeMethod(RawMethods.GetRedirectHost, JsArray()) map { json ⇒
json.extract[String]('return_value)
}
}
- def connect(collectorHost: String, connect: Settings): Future[Long] = {
- log.debug("Connecting to NewRelic Collector [{}]", collectorHost)
-
- val query = ("method" -> "connect") +: baseQuery
- val connectUri = Uri(s"http://$collectorHost/agent_listener/invoke_raw_method").withQuery(query)
-
- collectorClient {
- Post(connectUri, connect)
-
- } map { json ⇒
+ def connect(collectorHost: String, connect: AgentSettings): Future[Long] = {
+ val apiClient = new ApiMethodClient(collectorHost, None, agentSettings, IO(Http)(context.system))
+ apiClient.invokeMethod(RawMethods.Connect, connect) map { json ⇒
json.extract[Long]('return_value / 'agent_run_id)
}
}
}
object Agent {
- case object Initialize
- sealed trait InitResult
- case class Initialized(runId: Long, collector: String) extends InitResult
- case class InitializationFailed(reason: Throwable) extends InitResult
- case class Settings(licenseKey: String, appName: String, host: String, pid: Int, maxRetries: Int, retryDelay: FiniteDuration, apdexT: Double)
-
- def buildAgentSettings(system: ActorSystem) = {
- val config = system.settings.config.getConfig("kamon.newrelic")
- val appName = config.getString("app-name")
- val licenseKey = config.getString("license-key")
- val maxRetries = config.getInt("max-initialize-retries")
- val retryDelay = FiniteDuration(config.getMilliseconds("initialize-retry-delay"), milliseconds)
- val apdexT: Double = config.getMilliseconds("apdexT").toDouble
+ case object Connect
+ case object Reconnect
+ case object Shutdown
+ case object ResetConfiguration
+ case class Configure(collector: String, runID: Long)
+
+ sealed trait ConnectResult
+ case class Connected(collector: String, runID: Long) extends ConnectResult
+ case class ConnectFailed(reason: Throwable) extends ConnectResult
+}
+
+case class AgentSettings(licenseKey: String, appName: String, hostname: String, pid: Int, operationTimeout: Timeout,
+ maxConnectionRetries: Int, retryDelay: FiniteDuration, apdexT: Double)
+
+object AgentSettings {
+
+ def fromConfig(config: Config) = {
+ import kamon.util.ConfigTools.Syntax
// Name has the format of 'pid'@'host'
val runtimeName = ManagementFactory.getRuntimeMXBean.getName.split('@')
-
- Agent.Settings(licenseKey, appName, runtimeName(1), runtimeName(0).toInt, maxRetries, retryDelay, apdexT)
+ val newRelicConfig = config.getConfig("kamon.newrelic")
+
+ AgentSettings(
+ newRelicConfig.getString("license-key"),
+ newRelicConfig.getString("app-name"),
+ runtimeName(1),
+ runtimeName(0).toInt,
+ Timeout(newRelicConfig.getFiniteDuration("operation-timeout")),
+ newRelicConfig.getInt("max-connect-retries"),
+ newRelicConfig.getFiniteDuration("connect-retry-delay"),
+ newRelicConfig.getFiniteDuration("apdexT").toMillis / 1E3D)
}
}
\ No newline at end of file
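One detail in AgentSettings.fromConfig worth noting: both the pid and the hostname come from the JVM's RuntimeMXBean name, which has the pid@host format. A standalone sketch of that extraction:

    import java.lang.management.ManagementFactory

    object RuntimeNameSketch extends App {
      // Names look like "1234@my-box": index 0 is the pid, index 1 the host.
      val runtimeName = ManagementFactory.getRuntimeMXBean.getName.split('@')
      println(s"pid=${runtimeName(0).toInt} host=${runtimeName(1)}")
    }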
diff --git a/kamon-newrelic/src/main/scala/kamon/newrelic/ApiMethodClient.scala b/kamon-newrelic/src/main/scala/kamon/newrelic/ApiMethodClient.scala
new file mode 100644
index 00000000..263faa63
--- /dev/null
+++ b/kamon-newrelic/src/main/scala/kamon/newrelic/ApiMethodClient.scala
@@ -0,0 +1,68 @@
+package kamon.newrelic
+
+import akka.actor.ActorRef
+import kamon.newrelic.ApiMethodClient.{ NewRelicException, AgentShutdownRequiredException, AgentRestartRequiredException }
+import spray.http.Uri.Query
+import spray.http._
+import spray.httpx.encoding.Deflate
+import spray.httpx.marshalling.Marshaller
+import spray.httpx.unmarshalling._
+import spray.json.{ JsonParser, JsValue }
+import spray.json.lenses.JsonLenses._
+import spray.json.DefaultJsonProtocol._
+import spray.client.pipelining._
+
+import scala.concurrent.{ Future, ExecutionContext }
+import scala.util.control.NoStackTrace
+
+class ApiMethodClient(host: String, val runID: Option[Long], agentSettings: AgentSettings, httpTransport: ActorRef)(implicit exeContext: ExecutionContext) {
+
+ implicit val to = agentSettings.operationTimeout
+
+ val baseQuery = Query(runID.map(ri ⇒ Map("run_id" -> String.valueOf(ri))).getOrElse(Map.empty[String, String]) +
+ ("license_key" -> agentSettings.licenseKey) +
+ ("marshal_format" -> "json") +
+ ("protocol_version" -> "12"))
+
+ // New Relic responses contain JSON but with text/plain content type :(.
+ implicit val JsValueUnmarshaller = Unmarshaller[JsValue](MediaTypes.`application/json`, MediaTypes.`text/plain`) {
+ case x: HttpEntity.NonEmpty ⇒
+ JsonParser(x.asString(defaultCharset = HttpCharsets.`UTF-8`))
+ }
+
+ val httpClient = encode(Deflate) ~> sendReceive(httpTransport) ~> decode(Deflate) ~> unmarshal[JsValue]
+ val baseCollectorUri = Uri("/agent_listener/invoke_raw_method").withHost(host).withScheme("http")
+
+ def invokeMethod[T: Marshaller](method: String, payload: T): Future[JsValue] = {
+ val methodQuery = ("method" -> method) +: baseQuery
+
+ httpClient(Post(baseCollectorUri.withQuery(methodQuery), payload)) map { jsResponse ⇒
+ jsResponse.extract[String]('exception.? / 'error_type.?).map(_ match {
+ case CollectorErrors.`ForceRestart` ⇒ throw AgentRestartRequiredException
+ case CollectorErrors.`ForceShutdown` ⇒ throw AgentShutdownRequiredException
+ case anyOtherError ⇒
+ val errorMessage = jsResponse.extract[String]('exception / 'message.?).getOrElse("no message")
+ throw NewRelicException(anyOtherError, errorMessage)
+ })
+
+ jsResponse
+ }
+ }
+}
+
+object ApiMethodClient {
+ case class NewRelicException(exceptionType: String, message: String) extends RuntimeException with NoStackTrace
+ case object AgentRestartRequiredException extends RuntimeException with NoStackTrace
+ case object AgentShutdownRequiredException extends RuntimeException with NoStackTrace
+}
+
+object RawMethods {
+ val GetRedirectHost = "get_redirect_host"
+ val Connect = "connect"
+ val MetricData = "metric_data"
+}
+
+object CollectorErrors {
+ val ForceRestart = "NewRelic::Agent::ForceRestartException"
+ val ForceShutdown = "NewRelic::Agent::ForceDisconnectException"
+}
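A hypothetical call site for the client above, assuming an ActorSystem, an implicit ExecutionContext and an AgentSettings value are already in scope; the RootJsonWriter[AgentSettings] defined in JsonProtocol supplies the connect-payload marshaller:

    import akka.io.IO
    import spray.can.Http
    import spray.httpx.SprayJsonSupport._
    import kamon.newrelic.JsonProtocol._

    val client = new ApiMethodClient("collector.newrelic.com", None, agentSettings, IO(Http))

    // Post the connect payload; restart/shutdown requests surface as the failures above.
    client.invokeMethod(RawMethods.Connect, agentSettings) onFailure {
      case ApiMethodClient.AgentRestartRequiredException ⇒ // reconnect, as MetricReporter does
    }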
diff --git a/kamon-newrelic/src/main/scala/kamon/newrelic/ClientPipelines.scala b/kamon-newrelic/src/main/scala/kamon/newrelic/ClientPipelines.scala
deleted file mode 100644
index ca003646..00000000
--- a/kamon-newrelic/src/main/scala/kamon/newrelic/ClientPipelines.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-package kamon.newrelic
-
-import akka.actor.ActorRef
-import akka.util.Timeout
-import spray.http.{ HttpResponse, HttpRequest }
-import spray.httpx.RequestBuilding
-import spray.httpx.encoding.Deflate
-import spray.json._
-import spray.client.pipelining.sendReceive
-
-import scala.concurrent.{ ExecutionContext, Future }
-
-trait ClientPipelines extends RequestBuilding {
-
- def compressedPipeline(transport: ActorRef)(implicit ec: ExecutionContext, to: Timeout): HttpRequest ⇒ Future[HttpResponse] =
- encode(Deflate) ~> sendReceive(transport)
-
- def compressedToJsonPipeline(transport: ActorRef)(implicit ec: ExecutionContext, to: Timeout): HttpRequest ⇒ Future[JsValue] =
- compressedPipeline(transport) ~> toJson
-
- def toJson(response: HttpResponse): JsValue = response.entity.asString.parseJson
-
-}
diff --git a/kamon-newrelic/src/main/scala/kamon/newrelic/CustomMetricExtractor.scala b/kamon-newrelic/src/main/scala/kamon/newrelic/CustomMetricExtractor.scala
index 84472593..3b1b8cb3 100644
--- a/kamon-newrelic/src/main/scala/kamon/newrelic/CustomMetricExtractor.scala
+++ b/kamon-newrelic/src/main/scala/kamon/newrelic/CustomMetricExtractor.scala
@@ -16,18 +16,17 @@
package kamon.newrelic
-import kamon.metric.UserMetrics.UserMetricGroup
-import kamon.metric._
-import kamon.newrelic.Agent.Settings
+import kamon.metric.{ UserMetricsExtensionImpl, EntitySnapshot, Entity }
+import kamon.metric.instrument.CollectionContext
object CustomMetricExtractor extends MetricExtractor {
- def extract(settings: Settings, collectionContext: CollectionContext, metrics: Map[MetricGroupIdentity, MetricGroupSnapshot]): Map[MetricID, MetricData] = {
- metrics.collect {
- case (mg: UserMetricGroup, groupSnapshot) ⇒
- groupSnapshot.metrics collect {
- case (name, snapshot) ⇒ Metric.fromKamonMetricSnapshot(snapshot, s"Custom/${mg.name}", None, Scale.Unit)
- }
- }.flatten.toMap
+ def extract(settings: AgentSettings, collectionContext: CollectionContext, metrics: Map[Entity, EntitySnapshot]): Map[MetricID, MetricData] = {
+ metrics.get(UserMetricsExtensionImpl.UserMetricEntity).map { allUserMetrics ⇒
+ allUserMetrics.metrics.map {
+ case (key, snapshot) ⇒ Metric(snapshot, key.unitOfMeasurement, s"Custom/${key.name}", None)
+ }
+
+ } getOrElse (Map.empty)
}
}
diff --git a/kamon-newrelic/src/main/scala/kamon/newrelic/JsonProtocol.scala b/kamon-newrelic/src/main/scala/kamon/newrelic/JsonProtocol.scala
index 26e8839e..6e16b975 100644
--- a/kamon-newrelic/src/main/scala/kamon/newrelic/JsonProtocol.scala
+++ b/kamon-newrelic/src/main/scala/kamon/newrelic/JsonProtocol.scala
@@ -15,18 +15,18 @@
* ========================================================== */
package kamon.newrelic
+import kamon.util.Timestamp
import spray.json._
-import kamon.newrelic.Agent._
object JsonProtocol extends DefaultJsonProtocol {
- implicit object ConnectJsonWriter extends RootJsonWriter[Settings] {
- def write(obj: Settings): JsValue =
+ implicit object ConnectJsonWriter extends RootJsonWriter[AgentSettings] {
+ def write(obj: AgentSettings): JsValue =
JsArray(
JsObject(
"agent_version" -> JsString("3.1.0"),
"app_name" -> JsArray(JsString(obj.appName)),
- "host" -> JsString(obj.host),
+ "host" -> JsString(obj.hostname),
"identifier" -> JsString(s"java:${obj.appName}"),
"language" -> JsString("java"),
"pid" -> JsNumber(obj.pid)))
@@ -87,8 +87,8 @@ object JsonProtocol extends DefaultJsonProtocol {
def read(json: JsValue): MetricBatch = json match {
case JsArray(elements) ⇒
val runID = elements(0).convertTo[Long]
- val timeSliceFrom = elements(1).convertTo[Long]
- val timeSliceTo = elements(2).convertTo[Long]
+ val timeSliceFrom = new Timestamp(elements(1).convertTo[Long])
+ val timeSliceTo = new Timestamp(elements(2).convertTo[Long])
val metrics = elements(3).convertTo[Seq[Metric]]
MetricBatch(runID, TimeSliceMetrics(timeSliceFrom, timeSliceTo, metrics.toMap))
@@ -99,8 +99,8 @@ object JsonProtocol extends DefaultJsonProtocol {
def write(obj: MetricBatch): JsValue =
JsArray(
JsNumber(obj.runID),
- JsNumber(obj.timeSliceMetrics.from),
- JsNumber(obj.timeSliceMetrics.to),
+ JsNumber(obj.timeSliceMetrics.from.seconds),
+ JsNumber(obj.timeSliceMetrics.to.seconds),
obj.timeSliceMetrics.metrics.toSeq.toJson)
}
}
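For reference, the wire shape this reader/writer pair round-trips: a metric_data batch is a four-element array holding the run ID, the time-slice bounds in epoch seconds, and the metric entries. A hypothetical batch:

    import spray.json._
    import kamon.util.Timestamp
    import kamon.newrelic.JsonProtocol._

    val batch = MetricBatch(161221111L, TimeSliceMetrics(
      new Timestamp(1415587618), new Timestamp(1415587678),
      Map(MetricID("HttpDispatcher", None) -> MetricData(3, 0.006, 0.006, 0.001, 0.003, 1.4E-5))))

    // ⇒ [161221111, 1415587618, 1415587678, [ ...metric entries... ]]
    println(batch.toJson(MetricBatchWriter))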
diff --git a/kamon-newrelic/src/main/scala/kamon/newrelic/Metric.scala b/kamon-newrelic/src/main/scala/kamon/newrelic/Metric.scala
index 14541483..20204b79 100644
--- a/kamon-newrelic/src/main/scala/kamon/newrelic/Metric.scala
+++ b/kamon-newrelic/src/main/scala/kamon/newrelic/Metric.scala
@@ -1,7 +1,8 @@
package kamon.newrelic
-import kamon.metric.instrument.{ Counter, Histogram }
-import kamon.metric.{ MetricSnapshot, Scale }
+import kamon.metric.instrument._
+import kamon.metric.MetricKey
+import kamon.util.{ MapMerge, Timestamp }
case class MetricID(name: String, scope: Option[String])
case class MetricData(callCount: Long, total: Double, totalExclusive: Double, min: Double, max: Double, sumOfSquares: Double) {
@@ -17,16 +18,23 @@ case class MetricData(callCount: Long, total: Double, totalExclusive: Double, mi
object Metric {
- def fromKamonMetricSnapshot(snapshot: MetricSnapshot, name: String, scope: Option[String], targetScale: Scale): Metric = {
+ def scaleFunction(uom: UnitOfMeasurement): Long ⇒ Double = uom match {
+ case time: Time ⇒ time.scale(Time.Seconds)
+ case other ⇒ _.toDouble
+ }
+
+ def apply(snapshot: InstrumentSnapshot, snapshotUnit: UnitOfMeasurement, name: String, scope: Option[String]): Metric = {
snapshot match {
case hs: Histogram.Snapshot ⇒
var total: Double = 0D
var sumOfSquares: Double = 0D
- val scaledMin = Scale.convert(hs.scale, targetScale, hs.min)
- val scaledMax = Scale.convert(hs.scale, targetScale, hs.max)
+ val scaler = scaleFunction(snapshotUnit)
+
+ val scaledMin = scaler(hs.min)
+ val scaledMax = scaler(hs.max)
hs.recordsIterator.foreach { record ⇒
- val scaledValue = Scale.convert(hs.scale, targetScale, record.level)
+ val scaledValue = scaler(record.level)
total += scaledValue * record.count
sumOfSquares += (scaledValue * scaledValue) * record.count
@@ -40,13 +48,13 @@ object Metric {
}
}
-case class TimeSliceMetrics(from: Long, to: Long, metrics: Map[MetricID, MetricData]) {
- import kamon.metric.combineMaps
+case class TimeSliceMetrics(from: Timestamp, to: Timestamp, metrics: Map[MetricID, MetricData]) {
+ import MapMerge.Syntax
def merge(that: TimeSliceMetrics): TimeSliceMetrics = {
- val mergedFrom = math.min(from, that.from)
- val mergedTo = math.max(to, that.to)
- val mergedMetrics = combineMaps(metrics, that.metrics)((l, r) ⇒ l.merge(r))
+ val mergedFrom = Timestamp.earlier(from, that.from)
+ val mergedTo = Timestamp.later(to, that.to)
+ val mergedMetrics = metrics.merge(that.metrics, (l, r) ⇒ l.merge(r))
TimeSliceMetrics(mergedFrom, mergedTo, mergedMetrics)
}
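The unit handling above in isolation: trace values are recorded in nanoseconds while New Relic expects seconds, so scaleFunction resolves a Time-to-Time conversion. A quick check:

    import kamon.metric.instrument.Time

    val toSeconds: Long ⇒ Double = Time.Nanoseconds.scale(Time.Seconds)
    toSeconds(1500000L) // 1.5 milliseconds ⇒ 0.0015 seconds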
diff --git a/kamon-newrelic/src/main/scala/kamon/newrelic/MetricReporter.scala b/kamon-newrelic/src/main/scala/kamon/newrelic/MetricReporter.scala
index 9742ed09..842fbdc6 100644
--- a/kamon-newrelic/src/main/scala/kamon/newrelic/MetricReporter.scala
+++ b/kamon-newrelic/src/main/scala/kamon/newrelic/MetricReporter.scala
@@ -1,103 +1,109 @@
package kamon.newrelic
-import java.util.concurrent.TimeUnit
-
import akka.actor.{ Props, ActorLogging, Actor }
import akka.pattern.pipe
import akka.io.IO
-import akka.util.Timeout
import kamon.Kamon
-import kamon.metric.Subscriptions.TickMetricSnapshot
-import kamon.metric.UserMetrics.{ UserGauges, UserMinMaxCounters, UserCounters, UserHistograms }
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
import kamon.metric._
-import kamon.newrelic.MetricReporter.{ UnexpectedStatusCodeException, PostFailed, PostSucceeded, MetricDataPostResult }
+import kamon.metric.instrument.CollectionContext
+import kamon.newrelic.ApiMethodClient.{ AgentShutdownRequiredException, AgentRestartRequiredException }
+import kamon.newrelic.MetricReporter.{ PostFailed, PostSucceeded }
import spray.can.Http
-import spray.http.Uri
import spray.httpx.SprayJsonSupport
-import scala.concurrent.Future
import scala.concurrent.duration._
-import scala.util.control.NoStackTrace
-
-class MetricReporter(settings: Agent.Settings, runID: Long, baseUri: Uri) extends Actor
- with ClientPipelines with ActorLogging with SprayJsonSupport {
+import JsonProtocol._
- import JsonProtocol._
- import MetricReporter.Extractors
+class MetricReporter(settings: AgentSettings) extends Actor with ActorLogging with SprayJsonSupport {
import context.dispatcher
- val metricDataQuery = ("method" -> "metric_data") +: ("run_id" -> runID.toString) +: baseUri.query
- val metricDataUri = baseUri.withQuery(metricDataQuery)
-
- implicit val operationTimeout = Timeout(30 seconds)
- val metricsExtension = Kamon(Metrics)(context.system)
+ val metricsExtension = Kamon.metrics
val collectionContext = metricsExtension.buildDefaultCollectionContext
- val collectorClient = compressedPipeline(IO(Http)(context.system))
-
- val subscriber = {
- val tickInterval = context.system.settings.config.getMilliseconds("kamon.metrics.tick-interval")
- if (tickInterval == 60000)
- self
- else
- context.actorOf(TickMetricSnapshotBuffer.props(1 minute, self), "metric-buffer")
+ val metricsSubscriber = {
+ val tickInterval = Kamon.metrics.settings.tickInterval.toMillis
+
+ // Metrics are always sent to New Relic in 60-second intervals.
+ if (tickInterval == 60000) self
+ else context.actorOf(TickMetricSnapshotBuffer.props(1 minute, self), "metric-buffer")
}
- // Subscribe to Trace Metrics
- metricsExtension.subscribe(TraceMetrics, "*", subscriber, permanently = true)
+ subscribeToMetrics()
- // Subscribe to all User Metrics
- metricsExtension.subscribe(UserHistograms, "*", subscriber, permanently = true)
- metricsExtension.subscribe(UserCounters, "*", subscriber, permanently = true)
- metricsExtension.subscribe(UserMinMaxCounters, "*", subscriber, permanently = true)
- metricsExtension.subscribe(UserGauges, "*", subscriber, permanently = true)
+ def receive = awaitingConfiguration(None)
- def receive = reporting(None)
+ def awaitingConfiguration(bufferedMetrics: Option[TimeSliceMetrics]): Receive = {
+ case Agent.Configure(collector, runID) ⇒ startReporting(collector, runID, bufferedMetrics)
+ case Agent.ResetConfiguration ⇒ // Stay waiting.
+ case tickSnapshot: TickMetricSnapshot ⇒ keepWaitingForConfig(tickSnapshot, bufferedMetrics)
+ case PostSucceeded ⇒ // Ignore
+ case PostFailed(reason) ⇒ // Ignore any problems until we get a new configuration
+ }
+
+ def reporting(apiClient: ApiMethodClient, bufferedMetrics: Option[TimeSliceMetrics]): Receive = {
+ case tick: TickMetricSnapshot ⇒ sendMetricData(apiClient, tick, bufferedMetrics)
+ case PostSucceeded ⇒ context become reporting(apiClient, None)
+ case PostFailed(reason) ⇒ processCollectorFailure(reason)
+ case Agent.ResetConfiguration ⇒ context become awaitingConfiguration(bufferedMetrics)
+ }
- def reporting(pendingMetrics: Option[TimeSliceMetrics]): Receive = {
- case TickMetricSnapshot(from, to, metrics) ⇒
- val fromInSeconds = (from / 1E3).toInt
- val toInSeconds = (to / 1E3).toInt
- val extractedMetrics = Extractors.flatMap(_.extract(settings, collectionContext, metrics)).toMap
- val tickMetrics = TimeSliceMetrics(fromInSeconds, toInSeconds, extractedMetrics)
+ def sendMetricData(apiClient: ApiMethodClient, tick: TickMetricSnapshot, bufferedMetrics: Option[TimeSliceMetrics]): Unit = {
+ val metricsToReport = merge(convertToTimeSliceMetrics(tick), bufferedMetrics)
+ val customMarshaller = sprayJsonMarshaller(MetricBatchWriter, NewRelicJsonPrinter)
- val metricsToReport = pendingMetrics.foldLeft(tickMetrics)((p, n) ⇒ p.merge(n))
- context become reporting(Some(metricsToReport))
- pipe(sendMetricData(metricsToReport)) to self
+ if (log.isDebugEnabled)
+ log.debug("Sending [{}] metrics to New Relic for the time slice between {} and {}.", metricsToReport.metrics.size,
+ metricsToReport.from, metricsToReport.to)
- case PostSucceeded ⇒
- context become (reporting(None))
+ pipe {
+ apiClient.invokeMethod(RawMethods.MetricData, MetricBatch(apiClient.runID.get, metricsToReport))(customMarshaller)
+ .map { _ ⇒ PostSucceeded }
+ .recover { case error ⇒ PostFailed(error) }
+ } to self
- case PostFailed(reason) ⇒
- log.error(reason, "Metric POST to the New Relic collector failed, metrics will be accumulated with the next tick.")
+ context become reporting(apiClient, Some(metricsToReport))
}
- def sendMetricData(slice: TimeSliceMetrics): Future[MetricDataPostResult] = {
- log.debug("Sending [{}] metrics to New Relic for the time slice between {} and {}.", slice.metrics.size, slice.from, slice.to)
+ def processCollectorFailure(failureReason: Throwable): Unit = failureReason match {
+ case AgentRestartRequiredException ⇒ context.parent ! Agent.Reconnect
+ case AgentShutdownRequiredException ⇒ context.parent ! Agent.Shutdown
+ case anyOtherFailure ⇒
+ log.error(anyOtherFailure, "Metric POST to the New Relic collector failed, metrics will be accumulated with the next tick.")
+ }
- collectorClient {
- Post(metricDataUri, MetricBatch(runID, slice))(sprayJsonMarshaller(MetricBatchWriter, NewRelicJsonPrinter))
+ def startReporting(collector: String, runID: Long, bufferedMetrics: Option[TimeSliceMetrics]): Unit = {
+ val apiClient = new ApiMethodClient(collector, Some(runID), settings, IO(Http)(context.system))
+ context become reporting(apiClient, bufferedMetrics)
+ }
+
+ def keepWaitingForConfig(tickSnapshot: TickMetricSnapshot, bufferedMetrics: Option[TimeSliceMetrics]): Unit = {
+ val timeSliceMetrics = convertToTimeSliceMetrics(tickSnapshot)
+ context become awaitingConfiguration(Some(merge(timeSliceMetrics, bufferedMetrics)))
+ }
+
+ def merge(tsm: TimeSliceMetrics, buffered: Option[TimeSliceMetrics]): TimeSliceMetrics =
+ buffered.foldLeft(tsm)((p, n) ⇒ p.merge(n))
- } map { response ⇒
- if (response.status.isSuccess)
- PostSucceeded
- else
- PostFailed(new UnexpectedStatusCodeException(s"Received unsuccessful status code [${response.status.value}] from collector."))
- } recover { case t: Throwable ⇒ PostFailed(t) }
+ def convertToTimeSliceMetrics(tick: TickMetricSnapshot): TimeSliceMetrics = {
+ val extractedMetrics = MetricReporter.MetricExtractors.flatMap(_.extract(settings, collectionContext, tick.metrics)).toMap
+ TimeSliceMetrics(tick.from.toTimestamp, tick.to.toTimestamp, extractedMetrics)
+ }
+
+ def subscribeToMetrics(): Unit = {
+ metricsExtension.subscribe("trace", "*", metricsSubscriber, permanently = true)
+ metricsExtension.subscribe("user-metrics", "*", metricsSubscriber, permanently = true)
}
}
object MetricReporter {
- val Extractors: List[MetricExtractor] = WebTransactionMetricExtractor :: CustomMetricExtractor :: Nil
-
- def props(settings: Agent.Settings, runID: Long, baseUri: Uri): Props =
- Props(new MetricReporter(settings, runID, baseUri))
+ def props(settings: AgentSettings): Props = Props(new MetricReporter(settings))
sealed trait MetricDataPostResult
case object PostSucceeded extends MetricDataPostResult
case class PostFailed(reason: Throwable) extends MetricDataPostResult
- class UnexpectedStatusCodeException(message: String) extends RuntimeException(message) with NoStackTrace
+ val MetricExtractors: List[MetricExtractor] = WebTransactionMetricExtractor :: CustomMetricExtractor :: Nil
}
trait MetricExtractor {
- def extract(settings: Agent.Settings, collectionContext: CollectionContext, metrics: Map[MetricGroupIdentity, MetricGroupSnapshot]): Map[MetricID, MetricData]
+ def extract(settings: AgentSettings, collectionContext: CollectionContext, metrics: Map[Entity, EntitySnapshot]): Map[MetricID, MetricData]
}
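Taken together, the reporter is a small state machine driven by the Agent protocol; a sketch of its lifecycle, assuming an ActorSystem and an AgentSettings value in scope (this mirrors how the specs below drive it):

    val reporter = system.actorOf(MetricReporter.props(agentSettings))

    // Ticks arriving before configuration are buffered as merged TimeSliceMetrics.
    reporter ! Agent.Configure("collector-8.newrelic.com", 161221111L)

    // Each TickMetricSnapshot is now posted through the "metric_data" method; on
    // ForceRestart/ForceDisconnect the reporter asks its parent to Reconnect/Shutdown.
    reporter ! Agent.ResetConfiguration // back to buffering until reconfigured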
diff --git a/kamon-newrelic/src/main/scala/kamon/newrelic/NewRelicErrorLogger.scala b/kamon-newrelic/src/main/scala/kamon/newrelic/NewRelicErrorLogger.scala
index 08fdc8c4..7f56d931 100644
--- a/kamon-newrelic/src/main/scala/kamon/newrelic/NewRelicErrorLogger.scala
+++ b/kamon-newrelic/src/main/scala/kamon/newrelic/NewRelicErrorLogger.scala
@@ -21,7 +21,8 @@ import java.util
import akka.actor.{ Actor, ActorLogging }
import akka.event.Logging.{ Error, InitializeLogger, LoggerInitialized }
import com.newrelic.api.agent.{ NewRelic ⇒ NR }
-import kamon.trace.{ TraceRecorder, TraceContextAware }
+import kamon.trace.TraceLocal.HttpContextKey
+import kamon.trace.{ TraceContext, TraceLocal, TraceContextAware }
trait CustomParamsSupport {
this: NewRelicErrorLogger ⇒
@@ -40,9 +41,20 @@ class NewRelicErrorLogger extends Actor with ActorLogging with CustomParamsSuppo
def notifyError(error: Error): Unit = runInFakeTransaction {
val params = new util.HashMap[String, String]()
- val ctx = error.asInstanceOf[TraceContextAware].traceContext
- params put ("TraceToken", ctx.token)
+ if (error.isInstanceOf[TraceContextAware]) {
+ val ctx = error.asInstanceOf[TraceContextAware].traceContext
+ val httpContext = TraceLocal.retrieve(HttpContextKey)
+
+ params put ("TraceToken", ctx.token)
+
+ httpContext.map { httpCtx ⇒
+ params put ("User-Agent", httpCtx.agent)
+ params put ("X-Forwarded-For", httpCtx.xforwarded)
+ params put ("Request-URI", httpCtx.uri)
+ }
+ }
+
customParams foreach { case (k, v) ⇒ params.put(k, v) }
if (error.cause == Error.NoCause) NR.noticeError(error.message.toString, params)
@@ -52,7 +64,7 @@ class NewRelicErrorLogger extends Actor with ActorLogging with CustomParamsSuppo
// Really ugly, but a temporary hack until the next release...
def runInFakeTransaction[T](thunk: ⇒ T): T = {
val oldName = Thread.currentThread.getName
- Thread.currentThread.setName(TraceRecorder.currentContext.name)
+ Thread.currentThread.setName(TraceContext.currentContext.name)
try thunk finally Thread.currentThread.setName(oldName)
}
}
\ No newline at end of file
diff --git a/kamon-newrelic/src/main/scala/kamon/newrelic/WebTransactionMetricExtractor.scala b/kamon-newrelic/src/main/scala/kamon/newrelic/WebTransactionMetricExtractor.scala
index 0a4a516b..d0144f4b 100644
--- a/kamon-newrelic/src/main/scala/kamon/newrelic/WebTransactionMetricExtractor.scala
+++ b/kamon-newrelic/src/main/scala/kamon/newrelic/WebTransactionMetricExtractor.scala
@@ -16,77 +16,81 @@
package kamon.newrelic
-import scala.collection.mutable;
-import kamon.metric._
-import kamon.metric.TraceMetrics.{ TraceMetricsSnapshot, ElapsedTime }
-import kamon.metric.instrument.Histogram
-import kamon.trace.SegmentCategory.HttpClient
-import kamon.trace.SegmentMetricIdentity
+import kamon.metric.{ EntitySnapshot, Entity }
+
+import scala.collection.mutable
+import kamon.metric.instrument.{ Time, CollectionContext, Histogram }
object WebTransactionMetricExtractor extends MetricExtractor {
- def extract(settings: Agent.Settings, collectionContext: CollectionContext, metrics: Map[MetricGroupIdentity, MetricGroupSnapshot]): Map[MetricID, MetricData] = {
+ def extract(settings: AgentSettings, collectionContext: CollectionContext, metrics: Map[Entity, EntitySnapshot]): Map[MetricID, MetricData] = {
val apdexBuilder = new ApdexBuilder("Apdex", None, settings.apdexT)
// Trace metrics are recorded in nanoseconds.
- var accumulatedHttpDispatcher: Histogram.Snapshot = Histogram.Snapshot.empty(Scale.Nano)
- var accumulatedExternalServices: Histogram.Snapshot = Histogram.Snapshot.empty(Scale.Nano)
+ var accumulatedHttpDispatcher: Histogram.Snapshot = Histogram.Snapshot.empty
+ var accumulatedExternalServices: Histogram.Snapshot = Histogram.Snapshot.empty
val externalByHostSnapshots = mutable.Map.empty[String, List[Histogram.Snapshot]]
val externalByHostAndLibrarySnapshots = mutable.Map.empty[(String, String), List[Histogram.Snapshot]]
val externalScopedByHostAndLibrarySnapshots = mutable.Map.empty[(String, String, String), List[Histogram.Snapshot]]
- val transactionMetrics = metrics.collect {
- case (TraceMetrics(traceName), tms: TraceMetricsSnapshot) ⇒
-
- tms.segments.foreach {
- case (SegmentMetricIdentity(segmentName, category, library), snapshot: Histogram.Snapshot) if category.equals(HttpClient) ⇒
- accumulatedExternalServices = accumulatedExternalServices.merge(snapshot, collectionContext)
+ val transactionMetrics = metrics.filterKeys(_.category == "trace").map {
+ case (entity: Entity, es: EntitySnapshot) ⇒
+ // Trace metrics only contain elapsed-time and segments, all of which are Histograms.
+ es.histograms.foreach {
+ case (key, segmentSnapshot) if key.metadata.get("category").filter(_ == "http-client").nonEmpty ⇒
+ val library = key.metadata("library")
+ accumulatedExternalServices = accumulatedExternalServices.merge(segmentSnapshot, collectionContext)
// Accumulate externals by host
- externalByHostSnapshots.update(segmentName, snapshot :: externalByHostSnapshots.getOrElse(segmentName, Nil))
+ externalByHostSnapshots.update(key.name, segmentSnapshot :: externalByHostSnapshots.getOrElse(key.name, Nil))
// Accumulate externals by host and library
- externalByHostAndLibrarySnapshots.update((segmentName, library),
- snapshot :: externalByHostAndLibrarySnapshots.getOrElse((segmentName, library), Nil))
+ externalByHostAndLibrarySnapshots.update((key.name, library),
+ segmentSnapshot :: externalByHostAndLibrarySnapshots.getOrElse((key.name, library), Nil))
// Accumulate externals by host and library, including the transaction as scope.
- externalScopedByHostAndLibrarySnapshots.update((segmentName, library, traceName),
- snapshot :: externalScopedByHostAndLibrarySnapshots.getOrElse((segmentName, library, traceName), Nil))
+ externalScopedByHostAndLibrarySnapshots.update((key.name, library, entity.name),
+ segmentSnapshot :: externalScopedByHostAndLibrarySnapshots.getOrElse((key.name, library, entity.name), Nil))
- }
+ case otherSegments ⇒
- accumulatedHttpDispatcher = accumulatedHttpDispatcher.merge(tms.elapsedTime, collectionContext)
- tms.elapsedTime.recordsIterator.foreach { record ⇒
- apdexBuilder.record(Scale.convert(tms.elapsedTime.scale, Scale.Unit, record.level), record.count)
}
- Metric.fromKamonMetricSnapshot(tms.elapsedTime, "WebTransaction/Custom/" + traceName, None, Scale.Unit)
- }
+ es.histograms.collect {
+ case (key, elapsedTime) if key.name == "elapsed-time" ⇒
+ accumulatedHttpDispatcher = accumulatedHttpDispatcher.merge(elapsedTime, collectionContext)
+ elapsedTime.recordsIterator.foreach { record ⇒
+ apdexBuilder.record(Time.Nanoseconds.scale(Time.Seconds)(record.level), record.count)
+ }
+
+ Metric(elapsedTime, key.unitOfMeasurement, "WebTransaction/Custom/" + entity.name, None)
+ }
+ } flatten
- val httpDispatcher = Metric.fromKamonMetricSnapshot(accumulatedHttpDispatcher, "HttpDispatcher", None, Scale.Unit)
+ val httpDispatcher = Metric(accumulatedHttpDispatcher, Time.Seconds, "HttpDispatcher", None)
val webTransaction = httpDispatcher.copy(MetricID("WebTransaction", None))
val webTransactionTotal = httpDispatcher.copy(MetricID("WebTransactionTotalTime", None))
- val externalAllWeb = Metric.fromKamonMetricSnapshot(accumulatedExternalServices, "External/allWeb", None, Scale.Unit)
+ val externalAllWeb = Metric(accumulatedExternalServices, Time.Seconds, "External/allWeb", None)
val externalAll = externalAllWeb.copy(MetricID("External/all", None))
val externalByHost = externalByHostSnapshots.map {
case (host, snapshots) ⇒
- val mergedSnapshots = snapshots.foldLeft(Histogram.Snapshot.empty(Scale.Nano))(_.merge(_, collectionContext))
- Metric.fromKamonMetricSnapshot(mergedSnapshots, s"External/$host/all", None, Scale.Unit)
+ val mergedSnapshots = snapshots.foldLeft(Histogram.Snapshot.empty)(_.merge(_, collectionContext))
+ Metric(mergedSnapshots, Time.Seconds, s"External/$host/all", None)
}
val externalByHostAndLibrary = externalByHostAndLibrarySnapshots.map {
case ((host, library), snapshots) ⇒
- val mergedSnapshots = snapshots.foldLeft(Histogram.Snapshot.empty(Scale.Nano))(_.merge(_, collectionContext))
- Metric.fromKamonMetricSnapshot(mergedSnapshots, s"External/$host/$library", None, Scale.Unit)
+ val mergedSnapshots = snapshots.foldLeft(Histogram.Snapshot.empty)(_.merge(_, collectionContext))
+ Metric(mergedSnapshots, Time.Seconds, s"External/$host/$library", None)
}
val externalScopedByHostAndLibrary = externalScopedByHostAndLibrarySnapshots.map {
case ((host, library, traceName), snapshots) ⇒
- val mergedSnapshots = snapshots.foldLeft(Histogram.Snapshot.empty(Scale.Nano))(_.merge(_, collectionContext))
- Metric.fromKamonMetricSnapshot(mergedSnapshots, s"External/$host/$library", Some("WebTransaction/Custom/" + traceName), Scale.Unit)
+ val mergedSnapshots = snapshots.foldLeft(Histogram.Snapshot.empty)(_.merge(_, collectionContext))
+ Metric(mergedSnapshots, Time.Seconds, s"External/$host/$library", Some("WebTransaction/Custom/" + traceName))
}
Map(httpDispatcher, webTransaction, webTransactionTotal, externalAllWeb, externalAll, apdexBuilder.build) ++
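The ApdexBuilder itself is not shown in this diff, but the arithmetic it records follows the standard Apdex definition: with threshold T (kamon.newrelic.apdexT), a sample is satisfying at or below T, tolerating at or below 4T, and frustrating beyond that. A standalone sketch:

    // Standard Apdex score: (satisfied + tolerating / 2) / total samples.
    def apdexScore(samplesInSeconds: Seq[Double], t: Double): Double = {
      val satisfied = samplesInSeconds.count(_ <= t)
      val tolerating = samplesInSeconds.count(s ⇒ s > t && s <= 4 * t)
      (satisfied + tolerating / 2.0) / samplesInSeconds.size
    }

    apdexScore(Seq(0.5, 1.2, 6.0), t = 1.0) // ⇒ (1 + 0.5) / 3 = 0.5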
diff --git a/kamon-newrelic/src/test/scala/kamon/newrelic/AgentSpec.scala b/kamon-newrelic/src/test/scala/kamon/newrelic/AgentSpec.scala
index 7db9f2d0..3e15e9fd 100644
--- a/kamon-newrelic/src/test/scala/kamon/newrelic/AgentSpec.scala
+++ b/kamon-newrelic/src/test/scala/kamon/newrelic/AgentSpec.scala
@@ -22,7 +22,6 @@ import akka.actor.{ ActorRef, ActorSystem, Props }
import akka.io.IO
import akka.testkit._
import com.typesafe.config.ConfigFactory
-import kamon.AkkaExtensionSwap
import org.scalatest.{ BeforeAndAfterAll, WordSpecLike }
import spray.can.Http
import spray.http._
@@ -30,6 +29,7 @@ import spray.httpx.encoding.Deflate
import spray.httpx.{ SprayJsonSupport, RequestBuilding }
import spray.json.JsArray
import spray.json._
+import testkit.AkkaExtensionSwap
class AgentSpec extends TestKitBase with WordSpecLike with BeforeAndAfterAll with RequestBuilding with SprayJsonSupport {
import JsonProtocol._
@@ -44,9 +44,11 @@ class AgentSpec extends TestKitBase with WordSpecLike with BeforeAndAfterAll wit
| newrelic {
| app-name = kamon
| license-key = 1111111111
- | initialize-retry-delay = 1 second
- | max-initialize-retries = 3
+ | connect-retry-delay = 1 second
+ | max-connect-retries = 3
| }
+ |
+ | modules.kamon-newrelic.auto-start = no
|}
|
""".stripMargin))
@@ -88,7 +90,7 @@ class AgentSpec extends TestKitBase with WordSpecLike with BeforeAndAfterAll wit
})
// Receive the runID
- EventFilter.info(message = "Agent initialized with runID: [161221111] and collector: [collector-8.newrelic.com]", occurrences = 1).intercept {
+ EventFilter.info(message = "Configuring New Relic reporters to use runID: [161221111] and collector: [collector-8.newrelic.com]", occurrences = 1).intercept {
httpManager.reply(jsonResponse(
"""
| {
@@ -147,7 +149,7 @@ class AgentSpec extends TestKitBase with WordSpecLike with BeforeAndAfterAll wit
// Receive the runID
EventFilter.info(
- message = "Agent initialized with runID: [161221112] and collector: [collector-8.newrelic.com]", occurrences = 1).intercept {
+ message = "Configuring New Relic reporters to use runID: [161221112] and collector: [collector-8.newrelic.com]", occurrences = 1).intercept {
httpManager.reply(jsonResponse(
"""
@@ -184,7 +186,7 @@ class AgentSpec extends TestKitBase with WordSpecLike with BeforeAndAfterAll wit
})
// Give up on connecting.
- EventFilter[RuntimeException](message = "Giving up while trying to set up a connection with the New Relic collector.", occurrences = 1).intercept {
+ EventFilter.error(message = "Giving up while trying to set up a connection with the New Relic collector. The New Relic module is shutting down itself.", occurrences = 1).intercept {
httpManager.reply(Timedout(request))
}
}
diff --git a/kamon-newrelic/src/test/scala/kamon/newrelic/MetricReporterSpec.scala b/kamon-newrelic/src/test/scala/kamon/newrelic/MetricReporterSpec.scala
index 0001072e..04380677 100644
--- a/kamon-newrelic/src/test/scala/kamon/newrelic/MetricReporterSpec.scala
+++ b/kamon-newrelic/src/test/scala/kamon/newrelic/MetricReporterSpec.scala
@@ -16,40 +16,46 @@
package kamon.newrelic
-import akka.actor.{ ActorRef, ActorSystem }
+import akka.actor.ActorRef
import akka.io.IO
import akka.testkit._
+import akka.util.Timeout
import com.typesafe.config.ConfigFactory
-import kamon.metric.{ TraceMetrics, Metrics }
-import kamon.{ Kamon, AkkaExtensionSwap }
-import kamon.metric.Subscriptions.TickMetricSnapshot
-import org.scalatest.{ Matchers, WordSpecLike }
+import kamon.metric.{ Entity, TraceMetrics }
+import kamon.testkit.BaseKamonSpec
+import kamon.util.MilliTimestamp
+import kamon.Kamon
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
import spray.can.Http
import spray.http.Uri.Query
import spray.http._
import spray.httpx.encoding.Deflate
-import spray.httpx.{ RequestBuilding, SprayJsonSupport }
+import spray.httpx.SprayJsonSupport
+import testkit.AkkaExtensionSwap
import scala.concurrent.duration._
import spray.json._
-class MetricReporterSpec extends TestKitBase with WordSpecLike with Matchers with RequestBuilding with SprayJsonSupport {
+class MetricReporterSpec extends BaseKamonSpec("metric-reporter-spec") with SprayJsonSupport {
import kamon.newrelic.JsonProtocol._
- implicit lazy val system: ActorSystem = ActorSystem("metric-reporter-spec", ConfigFactory.parseString(
- """
- |akka {
- | loggers = ["akka.testkit.TestEventListener"]
- | loglevel = "INFO"
- |}
- |kamon {
- | metric {
- | tick-interval = 1 hour
- | }
- |}
- |
- """.stripMargin))
-
- val agentSettings = Agent.Settings("1111111111", "kamon", "test-host", 1, 1, 30 seconds, 1D)
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |akka {
+ | loggers = ["akka.testkit.TestEventListener"]
+ | loglevel = "INFO"
+ |}
+ |kamon {
+ | metric {
+ | tick-interval = 1 hour
+ | }
+ |
+ | modules.kamon-newrelic.auto-start = no
+ |}
+ |
+ """.stripMargin)
+
+ val agentSettings = AgentSettings("1111111111", "kamon", "test-host", 1, Timeout(5 seconds), 1, 30 seconds, 1D)
val baseQuery = Query(
"license_key" -> agentSettings.licenseKey,
"marshal_format" -> "json",
@@ -59,8 +65,9 @@ class MetricReporterSpec extends TestKitBase with WordSpecLike with Matchers wit
"the MetricReporter" should {
"report metrics to New Relic upon arrival" in new FakeTickSnapshotsFixture {
val httpManager = setHttpManager(TestProbe())
- val metricReporter = system.actorOf(MetricReporter.props(agentSettings, 9999, baseCollectorUri))
+ val metricReporter = system.actorOf(MetricReporter.props(agentSettings))
+ metricReporter ! Agent.Configure("collector-1.newrelic.com", 9999)
metricReporter ! firstSnapshot
val metricPost = httpManager.expectMsgType[HttpRequest]
@@ -70,8 +77,8 @@ class MetricReporterSpec extends TestKitBase with WordSpecLike with Matchers wit
val postedBatch = Deflate.decode(metricPost).entity.asString.parseJson.convertTo[MetricBatch]
postedBatch.runID should be(9999)
- postedBatch.timeSliceMetrics.from should be(1415587618)
- postedBatch.timeSliceMetrics.to should be(1415587678)
+ postedBatch.timeSliceMetrics.from.seconds should be(1415587618)
+ postedBatch.timeSliceMetrics.to.seconds should be(1415587678)
val metrics = postedBatch.timeSliceMetrics.metrics
metrics(MetricID("Apdex", None)).callCount should be(3)
@@ -81,8 +88,9 @@ class MetricReporterSpec extends TestKitBase with WordSpecLike with Matchers wit
"accumulate metrics if posting fails" in new FakeTickSnapshotsFixture {
val httpManager = setHttpManager(TestProbe())
- val metricReporter = system.actorOf(MetricReporter.props(agentSettings, 9999, baseCollectorUri))
+ val metricReporter = system.actorOf(MetricReporter.props(agentSettings))
+ metricReporter ! Agent.Configure("collector-1.newrelic.com", 9999)
metricReporter ! firstSnapshot
val request = httpManager.expectMsgType[HttpRequest]
httpManager.reply(Timedout(request))
@@ -96,8 +104,8 @@ class MetricReporterSpec extends TestKitBase with WordSpecLike with Matchers wit
val postedBatch = Deflate.decode(metricPost).entity.asString.parseJson.convertTo[MetricBatch]
postedBatch.runID should be(9999)
- postedBatch.timeSliceMetrics.from should be(1415587618)
- postedBatch.timeSliceMetrics.to should be(1415587738)
+ postedBatch.timeSliceMetrics.from.seconds should be(1415587618)
+ postedBatch.timeSliceMetrics.to.seconds should be(1415587738)
val metrics = postedBatch.timeSliceMetrics.metrics
metrics(MetricID("Apdex", None)).callCount should be(6)
@@ -130,20 +138,20 @@ class MetricReporterSpec extends TestKitBase with WordSpecLike with Matchers wit
}
trait FakeTickSnapshotsFixture {
- val testTraceID = TraceMetrics("example-trace")
- val recorder = Kamon(Metrics).register(testTraceID, TraceMetrics.Factory).get
- val collectionContext = Kamon(Metrics).buildDefaultCollectionContext
+ val testTraceID = Entity("example-trace", "trace")
+ val recorder = Kamon.metrics.register(TraceMetrics, testTraceID.name).get.recorder
+ val collectionContext = Kamon.metrics.buildDefaultCollectionContext
def collectRecorder = recorder.collect(collectionContext)
- recorder.elapsedTime.record(1000000)
- recorder.elapsedTime.record(2000000)
- recorder.elapsedTime.record(3000000)
- val firstSnapshot = TickMetricSnapshot(1415587618000L, 1415587678000L, Map(testTraceID -> collectRecorder))
+ recorder.ElapsedTime.record(1000000)
+ recorder.ElapsedTime.record(2000000)
+ recorder.ElapsedTime.record(3000000)
+ val firstSnapshot = TickMetricSnapshot(new MilliTimestamp(1415587618000L), new MilliTimestamp(1415587678000L), Map(testTraceID -> collectRecorder))
- recorder.elapsedTime.record(6000000)
- recorder.elapsedTime.record(5000000)
- recorder.elapsedTime.record(4000000)
- val secondSnapshot = TickMetricSnapshot(1415587678000L, 1415587738000L, Map(testTraceID -> collectRecorder))
+ recorder.ElapsedTime.record(6000000)
+ recorder.ElapsedTime.record(5000000)
+ recorder.ElapsedTime.record(4000000)
+ val secondSnapshot = TickMetricSnapshot(new MilliTimestamp(1415587678000L), new MilliTimestamp(1415587738000L), Map(testTraceID -> collectRecorder))
}
}
\ No newline at end of file
diff --git a/kamon-play/src/main/resources/reference.conf b/kamon-play/src/main/resources/reference.conf
index 5ad070ce..88f1de26 100644
--- a/kamon-play/src/main/resources/reference.conf
+++ b/kamon-play/src/main/resources/reference.conf
@@ -21,6 +21,13 @@ kamon {
# to traces and client http segments.
name-generator = kamon.play.DefaultPlayNameGenerator
- dispatcher = ${kamon.default-dispatcher}
+ }
+
+ modules {
+ kamon-play {
+ auto-start = yes
+ requires-aspectj = yes
+ extension-id = "kamon.play.Play"
+ }
}
}
\ No newline at end of file
diff --git a/kamon-play/src/main/scala/kamon/play/Play.scala b/kamon-play/src/main/scala/kamon/play/Play.scala
index 2184fa84..eb212940 100644
--- a/kamon-play/src/main/scala/kamon/play/Play.scala
+++ b/kamon-play/src/main/scala/kamon/play/Play.scala
@@ -20,7 +20,7 @@ import akka.actor.{ ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProv
import akka.event.Logging
import kamon.Kamon
import kamon.http.HttpServerMetrics
-import kamon.metric.Metrics
+import kamon.metric.Entity
import play.api.libs.ws.WS.WSRequest
import play.api.mvc.RequestHeader
@@ -36,9 +36,15 @@ class PlayExtension(private val system: ExtendedActorSystem) extends Kamon.Exten
log.info(s"Starting the Kamon(Play) extension")
private val config = system.settings.config.getConfig("kamon.play")
+ val httpServerMetrics = {
+ val metricsExtension = Kamon.metrics
+ val factory = metricsExtension.instrumentFactory(HttpServerMetrics.category)
+ val entity = Entity("play-server", HttpServerMetrics.category)
- val httpServerMetrics = Kamon(Metrics)(system).register(HttpServerMetrics, HttpServerMetrics.Factory).get
- val defaultDispatcher = system.dispatchers.lookup(config.getString("dispatcher"))
+ metricsExtension.register(entity, new HttpServerMetrics(factory)).recorder
+ }
+
+ val defaultDispatcher = system.dispatcher
val includeTraceToken: Boolean = config.getBoolean("automatic-trace-token-propagation")
val traceTokenHeaderName: String = config.getString("trace-token-header-name")
@@ -55,6 +61,6 @@ trait PlayNameGenerator {
}
class DefaultPlayNameGenerator extends PlayNameGenerator {
- def generateTraceName(requestHeader: RequestHeader): String = requestHeader.method + ": " + requestHeader.uri
+ def generateTraceName(requestHeader: RequestHeader): String = s"${requestHeader.method}: ${requestHeader.uri}"
def generateHttpClientSegmentName(request: WSRequest): String = request.url
}
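The lookup counterpart of the registration above, as the request-instrumentation spec later in this diff uses it; any code with access to Kamon.metrics can find and collect the Play server recorder:

    import kamon.Kamon
    import kamon.http.HttpServerMetrics

    val snapshot = Kamon.metrics
      .find("play-server", HttpServerMetrics.category)
      .map(_.collect(Kamon.metrics.buildDefaultCollectionContext))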
diff --git a/kamon-play/src/main/scala/kamon/play/action/KamonTraceActions.scala b/kamon-play/src/main/scala/kamon/play/action/KamonTraceActions.scala
index 0e777fd5..55e39bed 100644
--- a/kamon-play/src/main/scala/kamon/play/action/KamonTraceActions.scala
+++ b/kamon-play/src/main/scala/kamon/play/action/KamonTraceActions.scala
@@ -16,13 +16,13 @@
package kamon.play.action
-import kamon.trace.TraceRecorder
+import kamon.trace.TraceContext
import play.api.mvc._
import scala.concurrent.Future
case class TraceName[A](name: String)(action: Action[A]) extends Action[A] {
def apply(request: Request[A]): Future[SimpleResult] = {
- TraceRecorder.rename(name)
+ TraceContext.currentContext.rename(name)
action(request)
}
lazy val parser = action.parser
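Hypothetical controller usage of the TraceName wrapper above; the wrapped request is traced under the given name instead of the generated one:

    import kamon.play.action.TraceName
    import play.api.mvc._

    object Dashboard extends Controller {
      // Reported to Kamon as "user-dashboard" rather than the generated "GET: /..." name.
      def index = TraceName("user-dashboard") {
        Action {
          Ok("rendered")
        }
      }
    }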
diff --git a/kamon-play/src/main/scala/kamon/play/instrumentation/LoggerLikeInstrumentation.scala b/kamon-play/src/main/scala/kamon/play/instrumentation/LoggerLikeInstrumentation.scala
index e2ffd3f9..3c79fae4 100644
--- a/kamon-play/src/main/scala/kamon/play/instrumentation/LoggerLikeInstrumentation.scala
+++ b/kamon-play/src/main/scala/kamon/play/instrumentation/LoggerLikeInstrumentation.scala
@@ -15,19 +15,12 @@
package kamon.play.instrumentation
-import kamon.trace._
+import kamon.trace.logging.MdcKeysSupport
import org.aspectj.lang.ProceedingJoinPoint
import org.aspectj.lang.annotation._
-import org.slf4j.MDC
-import play.api.LoggerLike
@Aspect
-class LoggerLikeInstrumentation {
-
- import kamon.play.instrumentation.LoggerLikeInstrumentation._
-
- @DeclareMixin("play.api.LoggerLike+")
- def mixinContextAwareToLoggerLike: TraceContextAware = TraceContextAware.default
+class LoggerLikeInstrumentation extends MdcKeysSupport {
@Pointcut("execution(* play.api.LoggerLike+.info(..))")
def infoPointcut(): Unit = {}
@@ -41,35 +34,9 @@ class LoggerLikeInstrumentation {
@Pointcut("execution(* play.api.LoggerLike+.trace(..))")
def tracePointcut(): Unit = {}
- @Around("(infoPointcut() || warnPointcut() || errorPointcut() || tracePointcut()) && this(logger)")
- def aroundLog(pjp: ProceedingJoinPoint, logger: LoggerLike): Any = {
- withMDC {
- pjp.proceed()
- }
- }
-}
-
-object LoggerLikeInstrumentation {
-
- @inline final def withMDC[A](block: ⇒ A): A = {
- val keys = putAndExtractKeys(extractProperties(TraceRecorder.currentContext))
-
- try block finally keys.foreach(k ⇒ MDC.remove(k))
- }
-
- def putAndExtractKeys(values: Iterable[Map[String, Any]]): Iterable[String] = values.map {
- value ⇒ value.map { case (key, value) ⇒ MDC.put(key, value.toString); key }
- }.flatten
-
- def extractProperties(traceContext: TraceContext): Iterable[Map[String, Any]] = traceContext match {
- case ctx: DefaultTraceContext ⇒
- ctx.traceLocalStorage.underlyingStorage.values.collect {
- case traceLocalValue @ (p: Product) ⇒ {
- val properties = p.productIterator
- traceLocalValue.getClass.getDeclaredFields.filter(field ⇒ field.getName != "$outer").map(_.getName -> properties.next).toMap
- }
- }
- case EmptyTraceContext ⇒ Iterable.empty[Map[String, Any]]
+ @Around("(infoPointcut() || warnPointcut() || errorPointcut() || tracePointcut())")
+ def aroundLog(pjp: ProceedingJoinPoint): Any = withMdc {
+ pjp.proceed()
}
}
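What the rewritten aspect enables, sketched with a hypothetical key: values stored under an AvailableToMdc TraceLocal key become visible in the SLF4J MDC while a Play logger call runs, so they can be referenced as %X{...} in logback patterns:

    import kamon.trace.TraceLocal
    import kamon.trace.TraceLocal.AvailableToMdc

    val UserIdKey = AvailableToMdc("userID") // hypothetical key name
    TraceLocal.store(UserIdKey)("user-42")

    // While this call runs, the MDC contains userID=user-42.
    play.api.Logger.info("rendering dashboard")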
diff --git a/kamon-play/src/main/scala/kamon/play/instrumentation/RequestInstrumentation.scala b/kamon-play/src/main/scala/kamon/play/instrumentation/RequestInstrumentation.scala
index b44e45a3..d98f6b95 100644
--- a/kamon-play/src/main/scala/kamon/play/instrumentation/RequestInstrumentation.scala
+++ b/kamon-play/src/main/scala/kamon/play/instrumentation/RequestInstrumentation.scala
@@ -17,7 +17,8 @@ package kamon.play.instrumentation
import kamon.Kamon
import kamon.play.{ Play, PlayExtension }
-import kamon.trace.{ TraceContextAware, TraceRecorder }
+import kamon.trace.TraceLocal.{ HttpContextKey, HttpContext }
+import kamon.trace._
import org.aspectj.lang.ProceedingJoinPoint
import org.aspectj.lang.annotation._
import play.api.mvc._
@@ -26,48 +27,49 @@ import play.libs.Akka
@Aspect
class RequestInstrumentation {
- import RequestInstrumentation.normaliseTraceName
+
+ import RequestInstrumentation._
@DeclareMixin("play.api.mvc.RequestHeader+")
def mixinContextAwareNewRequest: TraceContextAware = TraceContextAware.default
- @After("execution(* play.api.GlobalSettings+.onStart(*)) && args(application)")
- def afterApplicationStart(application: play.api.Application): Unit = {
- Kamon(Play)(Akka.system())
- }
+ @Before("call(* play.api.GlobalSettings.onRouteRequest(..)) && args(requestHeader)")
+ def beforeRouteRequest(requestHeader: RequestHeader): Unit = {
+ import Kamon.tracer
+ val playExtension = Kamon(Play)
- @Before("execution(* play.api.GlobalSettings+.onRouteRequest(..)) && args(requestHeader)")
- def onRouteRequest(requestHeader: RequestHeader): Unit = {
- val system = Akka.system()
- val playExtension = Kamon(Play)(system)
val defaultTraceName = playExtension.generateTraceName(requestHeader)
-
val token = if (playExtension.includeTraceToken) {
requestHeader.headers.toSimpleMap.find(_._1 == playExtension.traceTokenHeaderName).map(_._2)
} else None
- TraceRecorder.start(defaultTraceName, token)(system)
+ val newContext = token.map(t ⇒ tracer.newContext(defaultTraceName, t)).getOrElse(tracer.newContext(defaultTraceName))
+ TraceContext.setCurrentContext(newContext)
}
- @Around("execution(* play.api.GlobalSettings+.doFilter(*)) && args(next)")
+ @Around("call(* play.api.GlobalSettings.doFilter(*)) && args(next)")
def aroundDoFilter(pjp: ProceedingJoinPoint, next: EssentialAction): Any = {
val essentialAction = (requestHeader: RequestHeader) ⇒ {
- // TODO: Move to a Kamon-specific dispatcher.
- val executor = Kamon(Play)(Akka.system()).defaultDispatcher
+
+ val playExtension = Kamon(Play)
+ val executor = playExtension.defaultDispatcher
def onResult(result: SimpleResult): SimpleResult = {
- TraceRecorder.withTraceContextAndSystem { (ctx, system) ⇒
+ TraceContext.map { ctx ⇒
ctx.finish()
- val playExtension = Kamon(Play)(system)
- recordHttpServerMetrics(result.header, ctx.name, playExtension)
+ recordHttpServerMetrics(result.header.status.toString, ctx.name)
if (playExtension.includeTraceToken) result.withHeaders(playExtension.traceTokenHeaderName -> ctx.token)
else result
} getOrElse result
}
+
+ // Store useful data in TraceLocal to help diagnose errors
+ storeDiagnosticData(requestHeader)
+
// Override the current trace name
- normaliseTraceName(requestHeader).map(TraceRecorder.rename)
+ normaliseTraceName(requestHeader).map(TraceContext.currentContext.rename)
// Invoke the action
next(requestHeader).map(onResult)(executor)
@@ -75,21 +77,33 @@ class RequestInstrumentation {
pjp.proceed(Array(EssentialAction(essentialAction)))
}
- @Before("execution(* play.api.GlobalSettings+.onError(..)) && args(request, ex)")
- def beforeOnError(request: TraceContextAware, ex: Throwable): Unit = TraceRecorder.withTraceContextAndSystem { (ctx, system) ⇒
- val playExtension = Kamon(Play)(system)
- playExtension.httpServerMetrics.recordResponse(ctx.name, "500")
+ @Before("call(* play.api.GlobalSettings.onError(..)) && args(request, ex)")
+ def beforeOnError(request: TraceContextAware, ex: Throwable): Unit = {
+ TraceContext.map { ctx ⇒
+ recordHttpServerMetrics("500", ctx.name)
+ }
}
- private def recordHttpServerMetrics(header: ResponseHeader, traceName: String, playExtension: PlayExtension): Unit =
- playExtension.httpServerMetrics.recordResponse(traceName, header.status.toString)
+ def recordHttpServerMetrics(status: String, traceName: String): Unit =
+ Kamon(Play).httpServerMetrics.recordResponse(traceName, status)
+
+ def storeDiagnosticData(request: RequestHeader): Unit = {
+ val agent = request.headers.get(UserAgent).getOrElse(Unknown)
+ val forwarded = request.headers.get(XForwardedFor).getOrElse(Unknown)
+
+ TraceLocal.store(HttpContextKey)(HttpContext(agent, request.uri, forwarded))
+ }
}
object RequestInstrumentation {
- import kamon.metric.Metrics.AtomicGetOrElseUpdateForTriemap
+ import kamon.util.TriemapAtomicGetOrElseUpdate.Syntax
import java.util.Locale
import scala.collection.concurrent.TrieMap
+ val UserAgent = "User-Agent"
+ val XForwardedFor = "X-Forwarded-For"
+ val Unknown = "unknown"
+
private val cache = TrieMap.empty[String, String]
def normaliseTraceName(requestHeader: RequestHeader): Option[String] = requestHeader.tags.get(Routes.ROUTE_VERB).map({ verb ⇒
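Reading back the diagnostic data stored by storeDiagnosticData above, as the spec later in this diff does; each field falls back to "unknown" when the corresponding header was absent:

    import kamon.trace.TraceLocal
    import kamon.trace.TraceLocal.HttpContextKey

    TraceLocal.retrieve(HttpContextKey) foreach { httpCtx ⇒
      println(s"agent=${httpCtx.agent} uri=${httpCtx.uri} forwarded=${httpCtx.xforwarded}")
    }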
diff --git a/kamon-play/src/main/scala/kamon/play/instrumentation/WSInstrumentation.scala b/kamon-play/src/main/scala/kamon/play/instrumentation/WSInstrumentation.scala
index fdc7fb09..f1ceb5d3 100644
--- a/kamon-play/src/main/scala/kamon/play/instrumentation/WSInstrumentation.scala
+++ b/kamon-play/src/main/scala/kamon/play/instrumentation/WSInstrumentation.scala
@@ -18,10 +18,9 @@ package kamon.play.instrumentation
import kamon.Kamon
import kamon.play.Play
-import kamon.trace.{ SegmentCategory, SegmentMetricIdentity }
+import kamon.trace.{ TraceContext, SegmentCategory }
import org.aspectj.lang.ProceedingJoinPoint
import org.aspectj.lang.annotation.{ Around, Aspect, Pointcut }
-import kamon.trace.TraceRecorder
import play.api.libs.ws.WS.WSRequest
import scala.concurrent.Future
import play.api.libs.ws.Response
@@ -34,8 +33,8 @@ class WSInstrumentation {
@Around("onExecuteRequest(request)")
def aroundExecuteRequest(pjp: ProceedingJoinPoint, request: WSRequest): Any = {
- TraceRecorder.withTraceContextAndSystem { (ctx, system) ⇒
- val playExtension = Kamon(Play)(system)
+ TraceContext.map { ctx ⇒
+ val playExtension = Kamon(Play)
val executor = playExtension.defaultDispatcher
val segmentName = playExtension.generateHttpClientSegmentName(request)
val segment = ctx.startSegment(segmentName, SegmentCategory.HttpClient, Play.SegmentLibraryName)
@@ -43,6 +42,6 @@ class WSInstrumentation {
response.map(result ⇒ segment.finish())(executor)
response
- } getOrElse (pjp.proceed())
+ } getOrElse pjp.proceed()
}
}
\ No newline at end of file
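For comparison, the manual equivalent of what this aspect weaves around a WS call, assuming a trace is active and an ExecutionContext named executor is in scope:

    import kamon.trace.{ TraceContext, SegmentCategory }
    import play.api.libs.ws.WS

    TraceContext.map { ctx ⇒
      val segment = ctx.startSegment("http://some-host/async", SegmentCategory.HttpClient, Play.SegmentLibraryName)
      val response = WS.url("http://some-host/async").get()
      response.map(_ ⇒ segment.finish())(executor) // close the segment when the response arrives
      response
    }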
diff --git a/kamon-play/src/test/resources/logback.xml b/kamon-play/src/test/resources/logback.xml
new file mode 100644
index 00000000..c336bbfe
--- /dev/null
+++ b/kamon-play/src/test/resources/logback.xml
@@ -0,0 +1,12 @@
+<configuration>
+ <statusListener class="ch.qos.logback.core.status.NopStatusListener"/>
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <encoder>
+ <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
+ </encoder>
+ </appender>
+
+ <root level="OFF">
+ <appender-ref ref="STDOUT"/>
+ </root>
+</configuration>
\ No newline at end of file
diff --git a/kamon-play/src/test/resources/logger.xml b/kamon-play/src/test/resources/logger.xml
deleted file mode 100644
index 84126e9d..00000000
--- a/kamon-play/src/test/resources/logger.xml
+++ /dev/null
@@ -1,16 +0,0 @@
-<configuration scan="true">
- <conversionRule conversionWord="traceToken" converterClass="kamon.trace.logging.LogbackTraceTokenConverter" />
- <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
- <encoder>
- <pattern>%date{HH:mm:ss.SSS} %-5level [%traceToken][%X{akkaSource}] [%thread] %logger{55} - %msg%n</pattern>
- </encoder>
- </appender>
-
- <logger name="play" level="INFO" />
- <logger name="application" level="INFO" />
-
- <root level="INFO">
- <appender-ref ref="STDOUT" />
- </root>
-
-</configuration>
diff --git a/kamon-play/src/test/scala/kamon/play/LoggerLikeInstrumentationSpec.scala b/kamon-play/src/test/scala/kamon/play/LoggerLikeInstrumentationSpec.scala
index c41f7004..de85743c 100644
--- a/kamon-play/src/test/scala/kamon/play/LoggerLikeInstrumentationSpec.scala
+++ b/kamon-play/src/test/scala/kamon/play/LoggerLikeInstrumentationSpec.scala
@@ -19,7 +19,9 @@ import ch.qos.logback.classic.spi.ILoggingEvent
import ch.qos.logback.classic.{ AsyncAppender, LoggerContext }
import ch.qos.logback.core.read.ListAppender
import ch.qos.logback.core.status.NopStatusListener
+import kamon.Kamon
import kamon.trace.TraceLocal
+import kamon.trace.TraceLocal.AvailableToMdc
import org.scalatest.BeforeAndAfter
import org.scalatestplus.play._
import org.slf4j
@@ -28,11 +30,12 @@ import play.api.mvc.Results.Ok
import play.api.mvc._
import play.api.test.Helpers._
import play.api.test._
+import scala.concurrent.duration._
-import scala.concurrent.Future
+import scala.concurrent.{ Await, Future }
class LoggerLikeInstrumentationSpec extends PlaySpec with OneServerPerSuite with BeforeAndAfter {
-
+ Kamon.start()
System.setProperty("config.file", "./kamon-play/src/test/resources/conf/application.conf")
val executor = scala.concurrent.ExecutionContext.Implicits.global
@@ -41,11 +44,8 @@ class LoggerLikeInstrumentationSpec extends PlaySpec with OneServerPerSuite with
val headerValue = "My header value"
val otherValue = "My other value"
- case class LocalStorageValue(header: String, other: String)
-
- object TraceLocalKey extends TraceLocal.TraceLocalKey {
- type ValueType = LocalStorageValue
- }
+ val TraceLocalHeaderKey = AvailableToMdc("header")
+ val TraceLocalOtherKey = AvailableToMdc("other")
before {
LoggingHandler.startLogging()
@@ -60,7 +60,8 @@ class LoggerLikeInstrumentationSpec extends PlaySpec with OneServerPerSuite with
case ("GET", "/logging") ⇒
Action.async {
Future {
- TraceLocal.store(TraceLocalKey)(LocalStorageValue(headerValue, otherValue))
+ TraceLocal.store(TraceLocalHeaderKey)(headerValue)
+ TraceLocal.store(TraceLocalOtherKey)(otherValue)
LoggingHandler.info(infoMessage)
Ok("OK")
}(executor)
@@ -68,12 +69,13 @@ class LoggerLikeInstrumentationSpec extends PlaySpec with OneServerPerSuite with
})
"the LoggerLike instrumentation" should {
- "be put the properties of TraceLocal into the MDC as key -> value in a request" in {
+ "allow retrieve a value from the MDC when was created a key of type AvailableToMdc in the current request" in {
LoggingHandler.appenderStart()
- val Some(result) = route(FakeRequest(GET, "/logging"))
- Thread.sleep(500) // wait to complete the future
- TraceLocal.retrieve(TraceLocalKey) must be(Some(LocalStorageValue(headerValue, otherValue)))
+ Await.result(route(FakeRequest(GET, "/logging")).get, 500 millis)
+
+ TraceLocal.retrieve(TraceLocalHeaderKey) must be(Some(headerValue))
+ TraceLocal.retrieve(TraceLocalOtherKey) must be(Some(otherValue))
LoggingHandler.appenderStop()
diff --git a/kamon-play/src/test/scala/kamon/play/RequestInstrumentationSpec.scala b/kamon-play/src/test/scala/kamon/play/RequestInstrumentationSpec.scala
index aec319d5..7053c296 100644
--- a/kamon-play/src/test/scala/kamon/play/RequestInstrumentationSpec.scala
+++ b/kamon-play/src/test/scala/kamon/play/RequestInstrumentationSpec.scala
@@ -16,10 +16,10 @@
package kamon.play
import kamon.Kamon
-import kamon.http.HttpServerMetrics
-import kamon.metric.{ CollectionContext, Metrics, TraceMetrics }
+import kamon.metric.instrument.CollectionContext
import kamon.play.action.TraceName
-import kamon.trace.{ TraceLocal, TraceRecorder }
+import kamon.trace.TraceLocal.HttpContextKey
+import kamon.trace.{ TraceLocal, TraceContext }
import org.scalatestplus.play._
import play.api.DefaultGlobal
import play.api.http.Writeable
@@ -31,15 +31,15 @@ import play.api.test.Helpers._
import play.api.test._
import play.core.Router.{ HandlerDef, Route, Routes }
import play.core.{ DynamicPart, PathPattern, Router, StaticPart }
-import play.libs.Akka
import scala.concurrent.duration._
import scala.concurrent.{ Await, Future }
class RequestInstrumentationSpec extends PlaySpec with OneServerPerSuite {
-
+ Kamon.start()
System.setProperty("config.file", "./kamon-play/src/test/resources/conf/application.conf")
+ override lazy val port: Port = 19002
val executor = scala.concurrent.ExecutionContext.Implicits.global
implicit override lazy val app = FakeApplication(withGlobal = Some(MockGlobalTest), withRoutes = {
@@ -117,33 +117,43 @@ class RequestInstrumentationSpec extends PlaySpec with OneServerPerSuite {
"respond to the Async Action with X-Trace-Token and the renamed trace" in {
val result = Await.result(route(FakeRequest(GET, "/async-renamed").withHeaders(traceTokenHeader)).get, 10 seconds)
- TraceRecorder.currentContext.name must be("renamed-trace")
+ TraceContext.currentContext.name must be("renamed-trace")
Some(result.header.headers(traceTokenHeaderName)) must be(expectedToken)
}
"propagate the TraceContext and LocalStorage through of filters in the current request" in {
- val Some(result) = route(FakeRequest(GET, "/retrieve").withHeaders(traceTokenHeader, traceLocalStorageHeader))
+ route(FakeRequest(GET, "/retrieve").withHeaders(traceTokenHeader, traceLocalStorageHeader))
TraceLocal.retrieve(TraceLocalKey).get must be(traceLocalStorageValue)
}
"response to the getRouted Action and normalise the current TraceContext name" in {
- Await.result(WS.url("http://localhost:19001/getRouted").get, 10 seconds)
- Kamon(Metrics)(Akka.system()).storage.get(TraceMetrics("getRouted.get")) must not be (empty)
+ Await.result(WS.url(s"http://localhost:$port/getRouted").get(), 10 seconds)
+ Kamon.metrics.find("getRouted.get", "trace") must not be empty
}
"response to the postRouted Action and normalise the current TraceContext name" in {
- Await.result(WS.url("http://localhost:19001/postRouted").post("content"), 10 seconds)
- Kamon(Metrics)(Akka.system()).storage.get(TraceMetrics("postRouted.post")) must not be (empty)
+ Await.result(WS.url(s"http://localhost:$port/postRouted").post("content"), 10 seconds)
+ Kamon.metrics.find("postRouted.post", "trace") must not be empty
}
"response to the showRouted Action and normalise the current TraceContext name" in {
- Await.result(WS.url("http://localhost:19001/showRouted/2").get, 10 seconds)
- Kamon(Metrics)(Akka.system()).storage.get(TraceMetrics("show.some.id.get")) must not be (empty)
+ Await.result(WS.url(s"http://localhost:$port/showRouted/2").get(), 10 seconds)
+ Kamon.metrics.find("show.some.id.get", "trace") must not be empty
+ }
+
+ "include HttpContext information for help to diagnose possible errors" in {
+ Await.result(WS.url(s"http://localhost:$port/getRouted").get(), 10 seconds)
+ route(FakeRequest(GET, "/default").withHeaders("User-Agent" -> "Fake-Agent"))
+
+ val httpCtx = TraceLocal.retrieve(HttpContextKey).get
+ httpCtx.agent must be("Fake-Agent")
+ httpCtx.uri must be("/default")
+ httpCtx.xforwarded must be("unknown")
}
"record http server metrics for all processed requests" in {
val collectionContext = CollectionContext(100)
- Kamon(Metrics)(Akka.system()).register(HttpServerMetrics, HttpServerMetrics.Factory).get.collect(collectionContext)
+ Kamon.metrics.find("play-server", "http-server").get.collect(collectionContext)
for (repetition ← 1 to 10) {
Await.result(route(FakeRequest(GET, "/default").withHeaders(traceTokenHeader)).get, 10 seconds)
@@ -157,13 +167,13 @@ class RequestInstrumentationSpec extends PlaySpec with OneServerPerSuite {
Await.result(routeWithOnError(FakeRequest(GET, "/error").withHeaders(traceTokenHeader)).get, 10 seconds)
}
- val snapshot = Kamon(Metrics)(Akka.system()).register(HttpServerMetrics, HttpServerMetrics.Factory).get.collect(collectionContext)
- snapshot.countsPerTraceAndStatusCode("GET: /default")("200").count must be(10)
- snapshot.countsPerTraceAndStatusCode("GET: /notFound")("404").count must be(5)
- snapshot.countsPerTraceAndStatusCode("GET: /error")("500").count must be(5)
- snapshot.countsPerStatusCode("200").count must be(10)
- snapshot.countsPerStatusCode("404").count must be(5)
- snapshot.countsPerStatusCode("500").count must be(5)
+ val snapshot = Kamon.metrics.find("play-server", "http-server").get.collect(collectionContext)
+ snapshot.counter("GET: /default_200").get.count must be(10)
+ snapshot.counter("GET: /notFound_404").get.count must be(5)
+ snapshot.counter("GET: /error_500").get.count must be(5)
+ snapshot.counter("200").get.count must be(10)
+ snapshot.counter("404").get.count must be(5)
+ snapshot.counter("500").get.count must be(5)
}
}
@@ -175,7 +185,7 @@ class RequestInstrumentationSpec extends PlaySpec with OneServerPerSuite {
object TraceLocalFilter extends Filter {
override def apply(next: (RequestHeader) ⇒ Future[SimpleResult])(header: RequestHeader): Future[SimpleResult] = {
- TraceRecorder.withTraceContext(TraceRecorder.currentContext) {
+ TraceContext.withContext(TraceContext.currentContext) {
TraceLocal.store(TraceLocalKey)(header.headers.get(traceLocalStorageKey).getOrElse("unknown"))
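
The filter above carries request-scoped values through Kamon's TraceLocal storage, which travels with the active TraceContext. A minimal sketch of the same pattern outside the filter, assuming a custom key is declared by extending TraceLocal.TraceLocalKey with a ValueType member (the key shape is an assumption; store and retrieve are used exactly as in the spec above):

    import kamon.Kamon
    import kamon.trace.{ TraceContext, TraceLocal }

    object TraceLocalSketch extends App {
      Kamon.start()

      // Hypothetical key; only store/retrieve appear in the diff above.
      object UserAgentKey extends TraceLocal.TraceLocalKey {
        type ValueType = String
      }

      // TraceLocal values live alongside the active TraceContext.
      TraceContext.withContext(Kamon.tracer.newContext("tracelocal-sketch")) {
        TraceLocal.store(UserAgentKey)("Fake-Agent")
        assert(TraceLocal.retrieve(UserAgentKey).contains("Fake-Agent"))
      }
    }
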
diff --git a/kamon-play/src/test/scala/kamon/play/WSInstrumentationSpec.scala b/kamon-play/src/test/scala/kamon/play/WSInstrumentationSpec.scala
index bda8281b..577d9bed 100644
--- a/kamon-play/src/test/scala/kamon/play/WSInstrumentationSpec.scala
+++ b/kamon-play/src/test/scala/kamon/play/WSInstrumentationSpec.scala
@@ -17,9 +17,8 @@
package kamon.play
import kamon.Kamon
-import kamon.metric.TraceMetrics.TraceMetricsSnapshot
-import kamon.metric.{ Metrics, TraceMetrics }
-import kamon.trace.{ SegmentCategory, SegmentMetricIdentity, TraceRecorder }
+import kamon.metric.{ EntitySnapshot, TraceMetrics }
+import kamon.trace.{ TraceContext, SegmentCategory }
import org.scalatest.{ Matchers, WordSpecLike }
import org.scalatestplus.play.OneServerPerSuite
import play.api.libs.ws.WS
@@ -27,19 +26,20 @@ import play.api.mvc.Action
import play.api.mvc.Results.Ok
import play.api.test.Helpers._
import play.api.test._
-import play.libs.Akka
import scala.concurrent.Await
import scala.concurrent.duration._
class WSInstrumentationSpec extends WordSpecLike with Matchers with OneServerPerSuite {
-
+ Kamon.start()
+ import kamon.metric.TraceMetricsSpec.SegmentSyntax
System.setProperty("config.file", "./kamon-play/src/test/resources/conf/application.conf")
+ override lazy val port: Port = 19003
implicit override lazy val app = FakeApplication(withRoutes = {
case ("GET", "/async") ⇒ Action { Ok("ok") }
case ("GET", "/outside") ⇒ Action { Ok("ok") }
- case ("GET", "/inside") ⇒ callWSinsideController("http://localhost:19001/async")
+ case ("GET", "/inside") ⇒ callWSinsideController(s"http://localhost:$port/async")
})
"the WS instrumentation" should {
@@ -47,29 +47,34 @@ class WSInstrumentationSpec extends WordSpecLike with Matchers with OneServerPer
Await.result(route(FakeRequest(GET, "/inside")).get, 10 seconds)
val snapshot = takeSnapshotOf("GET: /inside")
- snapshot.elapsedTime.numberOfMeasurements should be(1)
+ snapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
snapshot.segments.size should be(1)
- snapshot.segments(SegmentMetricIdentity("http://localhost:19001/async", SegmentCategory.HttpClient, Play.SegmentLibraryName)).numberOfMeasurements should be(1)
+ snapshot.segment(s"http://localhost:$port/async", SegmentCategory.HttpClient, Play.SegmentLibraryName).numberOfMeasurements should be(1)
}
"propagate the TraceContext outside an Action and complete the WS request" in {
- TraceRecorder.withNewTraceContext("trace-outside-action") {
- Await.result(WS.url("http://localhost:19001/outside").get(), 10 seconds)
- TraceRecorder.finish()
- }(Akka.system())
+ TraceContext.withContext(newContext("trace-outside-action")) {
+ Await.result(WS.url(s"http://localhost:$port/outside").get(), 10 seconds)
+ TraceContext.currentContext.finish()
+ }
val snapshot = takeSnapshotOf("trace-outside-action")
- //snapshot.elapsedTime.numberOfMeasurements should be(1) disabled for fail in travis
- //snapshot.segments.size should be(1) disabled for fail in travis
- //snapshot.segments(HttpClientRequest("http://localhost:19001/outside")).numberOfMeasurements should be(1) disabled for fail in travis
+ snapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
+ snapshot.segments.size should be(1)
+ snapshot.segment(s"http://localhost:$port/outside", SegmentCategory.HttpClient, Play.SegmentLibraryName).numberOfMeasurements should be(1)
}
}
- def takeSnapshotOf(traceName: String): TraceMetricsSnapshot = {
- val recorder = Kamon(Metrics)(Akka.system()).register(TraceMetrics(traceName), TraceMetrics.Factory)
- val collectionContext = Kamon(Metrics)(Akka.system()).buildDefaultCollectionContext
- recorder.get.collect(collectionContext)
+ def newContext(name: String): TraceContext =
+ Kamon.tracer.newContext(name)
+
+ def takeSnapshotOf(traceName: String): EntitySnapshot = {
+ // Give some time for async segments to finish.
+ Thread.sleep(300)
+ val recorder = Kamon.metrics.register(TraceMetrics, traceName).get.recorder
+ val collectionContext = Kamon.metrics.buildDefaultCollectionContext
+ recorder.collect(collectionContext)
}
def callWSinsideController(url: String) = Action.async {
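
The WS spec above replaces the old TraceRecorder entry points with the TraceContext companion plus Kamon.tracer. A minimal sketch of driving a trace by hand under the new API, using only calls that appear in this diff:

    import kamon.Kamon
    import kamon.trace.TraceContext

    object ManualTraceSketch extends App {
      Kamon.start() // boot Kamon before any tracing, as the specs above now do

      // Create a context explicitly, activate it for a block of work, and finish it.
      TraceContext.withContext(Kamon.tracer.newContext("manual-trace")) {
        // ... traced work, WS calls, etc. ...
        TraceContext.currentContext.finish()
      }
    }
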
diff --git a/kamon-play/src/test/scala/kamon/play/instrumentation/FakeRequestIntrumentation.scala b/kamon-play/src/test/scala/kamon/play/instrumentation/FakeRequestIntrumentation.scala
index 55b72908..10e285db 100644
--- a/kamon-play/src/test/scala/kamon/play/instrumentation/FakeRequestIntrumentation.scala
+++ b/kamon-play/src/test/scala/kamon/play/instrumentation/FakeRequestIntrumentation.scala
@@ -21,6 +21,7 @@ import kamon.trace.TraceContextAware
@Aspect
class FakeRequestIntrumentation {
+
@DeclareMixin("play.api.test.FakeRequest")
def mixinContextAwareNewRequest: TraceContextAware = TraceContextAware.default
}
diff --git a/kamon-playground/src/main/resources/application.conf b/kamon-playground/src/main/resources/application.conf
index 86a87439..74e710bc 100644
--- a/kamon-playground/src/main/resources/application.conf
+++ b/kamon-playground/src/main/resources/application.conf
@@ -1,6 +1,5 @@
akka {
loglevel = DEBUG
- extensions = ["kamon.newrelic.NewRelic"]
actor {
debug {
@@ -16,84 +15,26 @@ spray.can {
}
kamon {
- newrelic {
- app-name = "SimpleRequestProcessor"
- license-key = e7d350b14228f3d28f35bc3140df2c3e565ea5d5
- }
-}
-
-kamon.statsd {
- hostname = "192.168.59.103"
- simple-metric-key-generator {
- metric-name-normalization-strategy = percent-encode
+ trace {
+ level = simple-trace
}
-}
-
-kamon {
- metrics {
- tick-interval = 1 second
-
- filters = [
- {
- actor {
- includes = [ "user/simple-service-actor" ]
- excludes = [ "system/*", "user/IO-*" ]
- }
- },
- {
- trace {
- includes = [ "*" ]
- excludes = []
- }
- },
- {
- dispatcher {
- includes = [ "akka.actor.default-dispatcher" ]
- excludes = []
- }
- },
- {
- custom-metric {
- includes = [ "*" ]
- excludes = [ ]
- }
- },
- {
- router {
- includes = [ "user/replier" ]
- excludes = [ "system/*", "user/IO-*" ]
- }
- }
- ]
- precision {
- actor {
- processing-time {
- highest-trackable-value = 3600000000000
- significant-value-digits = 1
- }
- time-in-mailbox {
- highest-trackable-value = 3600000000000
- significant-value-digits = 1
- }
- mailbox-size {
- highest-trackable-value = 99999999
- significant-value-digits = 1
- }
- }
+ metric {
+ filters {
+ trace.includes = [ "**" ]
+ actor.includes = [ "**" ]
}
}
-}
-
+ newrelic {
+ app-name = "SimpleRequestProcessor"
+ license-key = e7d350b14228f3d28f35bc3140df2c3e565ea5d5
+ }
-kamon {
- metrics {
- actors {
- tracked = [
- "user/simple-service-actor",
- "other"
- ]
- }
+ modules {
+ kamon-newrelic.auto-start = no
+ kamon-datadog.auto-start = no
+ kamon-log-reporter.auto-start = no
+ kamon-system-metrics.auto-start = no
}
-} \ No newline at end of file
+}
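
The rewritten playground configuration leans on the new kamon.modules block to keep reporters from auto-starting. A small sketch, using the Typesafe Config API these projects already depend on, of how such a flag can be read back (the key names are taken from the block above):

    import com.typesafe.config.ConfigFactory

    object ModuleFlagsSketch extends App {
      val config = ConfigFactory.load()

      // Each module under kamon.modules carries an auto-start flag.
      val newRelicAutoStart = config.getBoolean("kamon.modules.kamon-newrelic.auto-start")
      println(s"kamon-newrelic auto-start: $newRelicAutoStart")
    }
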
diff --git a/kamon-playground/src/main/scala/test/SimpleRequestProcessor.scala b/kamon-playground/src/main/scala/test/SimpleRequestProcessor.scala
index b548b057..7a38e790 100644
--- a/kamon-playground/src/main/scala/test/SimpleRequestProcessor.scala
+++ b/kamon-playground/src/main/scala/test/SimpleRequestProcessor.scala
@@ -20,10 +20,10 @@ import akka.actor._
import akka.routing.RoundRobinRouter
import akka.util.Timeout
import kamon.Kamon
-import kamon.metric.Subscriptions.TickMetricSnapshot
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
import kamon.metric._
import kamon.spray.KamonTraceDirectives
-import kamon.trace.{ SegmentCategory, TraceRecorder }
+import kamon.trace.{ TraceContext, SegmentCategory }
import spray.http.{ StatusCodes, Uri }
import spray.httpx.RequestBuilding
import spray.routing.SimpleRoutingApp
@@ -38,6 +38,7 @@ object SimpleRequestProcessor extends App with SimpleRoutingApp with RequestBuil
import scala.concurrent.duration._
implicit val system = ActorSystem("test")
+ Kamon.start()
import test.SimpleRequestProcessor.system.dispatcher
val printer = system.actorOf(Props[PrintWhatever])
@@ -46,26 +47,9 @@ object SimpleRequestProcessor extends App with SimpleRoutingApp with RequestBuil
def receive: Actor.Receive = { case any ⇒ sender ! any }
}), "com")
- //val buffer = system.actorOf(TickMetricSnapshotBuffer.props(30 seconds, printer))
-
- //Kamon(Metrics).subscribe(CustomMetric, "*", buffer, permanently = true)
- //Kamon(Metrics).subscribe(ActorMetrics, "*", printer, permanently = true)
-
implicit val timeout = Timeout(30 seconds)
- val counter = Kamon(UserMetrics).registerCounter("requests")
- Kamon(UserMetrics).registerCounter("requests-2")
- Kamon(UserMetrics).registerCounter("requests-3")
-
- Kamon(UserMetrics).registerHistogram("histogram-1")
- Kamon(UserMetrics).registerHistogram("histogram-2")
-
- Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-1")
- Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-2")
- Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-3")
-
- //Kamon(UserMetrics).registerGauge("test-gauge")(() => 10L)
-
+ val counter = Kamon.userMetrics.counter("requests")
val pipeline = sendReceive
val replier = system.actorOf(Props[Replier].withRouter(RoundRobinRouter(nrOfInstances = 4)), "replier")
@@ -132,7 +116,7 @@ object SimpleRequestProcessor extends App with SimpleRoutingApp with RequestBuil
} ~
path("segment") {
complete {
- val segment = TraceRecorder.currentContext.startSegment("hello-world", SegmentCategory.HttpClient, "none")
+ val segment = TraceContext.currentContext.startSegment("hello-world", SegmentCategory.HttpClient, "none")
(replier ? "hello").mapTo[String].onComplete { t ⇒
segment.finish()
}
@@ -178,7 +162,7 @@ object Verifier extends App {
class Replier extends Actor with ActorLogging {
def receive = {
case anything ⇒
- if (TraceRecorder.currentContext.isEmpty)
+ if (TraceContext.currentContext.isEmpty)
log.warning("PROCESSING A MESSAGE WITHOUT CONTEXT")
//log.info("Processing at the Replier, and self is: {}", self)
@@ -195,6 +179,6 @@ object PingPong extends App {
def receive: Actor.Receive = { case "ping" ⇒ sender ! "pong" }
}))
- pinger.tell("pong", ponger)
+ //pinger.tell("pong", ponger)
}
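
SimpleRequestProcessor now uses the single-call user metrics API instead of the long list of registerCounter/registerHistogram calls that were removed. A short sketch of that API together with the segment pattern from the /segment route; the increment() call is an assumption, since only counter(...) itself appears in this diff:

    import kamon.Kamon
    import kamon.trace.{ SegmentCategory, TraceContext }

    object UserMetricsSketch extends App {
      Kamon.start()

      val requests = Kamon.userMetrics.counter("requests")
      requests.increment() // assumed API; the diff only shows counter creation

      // Segments hang off the current TraceContext, as in the /segment route above.
      val segment = TraceContext.currentContext.startSegment("example-call", SegmentCategory.HttpClient, "none")
      segment.finish()
    }
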
diff --git a/kamon-scala/src/main/resources/META-INF/aop.xml b/kamon-scala/src/main/resources/META-INF/aop.xml
new file mode 100644
index 00000000..a1e98a9f
--- /dev/null
+++ b/kamon-scala/src/main/resources/META-INF/aop.xml
@@ -0,0 +1,17 @@
+<!DOCTYPE aspectj PUBLIC "-//AspectJ//DTD//EN" "http://www.eclipse.org/aspectj/dtd/aspectj.dtd">
+
+<aspectj>
+ <aspects>
+
+ <!-- Futures -->
+ <aspect name="kamon.scala.instrumentation.FutureInstrumentation"/>
+ <aspect name="kamon.scalaz.instrumentation.FutureInstrumentation"/>
+
+ </aspects>
+
+ <weaver>
+ <include within="scala.concurrent..*"/>
+ <include within="scalaz.concurrent..*"/>
+ </weaver>
+
+</aspectj> \ No newline at end of file
diff --git a/kamon-scala/src/main/resources/reference.conf b/kamon-scala/src/main/resources/reference.conf
new file mode 100644
index 00000000..1a621e09
--- /dev/null
+++ b/kamon-scala/src/main/resources/reference.conf
@@ -0,0 +1,14 @@
+# =================================== #
+# Kamon-Scala Reference Configuration #
+# =================================== #
+
+kamon {
+
+ modules {
+ kamon-scala {
+ auto-start = yes
+ requires-aspectj = yes
+ extension-id = none
+ }
+ }
+} \ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/instrumentation/scala/FutureInstrumentation.scala b/kamon-scala/src/main/scala/kamon/scala/instrumentation/FutureInstrumentation.scala
index bda2da78..01514869 100644
--- a/kamon-core/src/main/scala/kamon/instrumentation/scala/FutureInstrumentation.scala
+++ b/kamon-scala/src/main/scala/kamon/scala/instrumentation/FutureInstrumentation.scala
@@ -14,9 +14,9 @@
* =========================================================================================
*/
-package kamon.instrumentation.scala
+package kamon.scala.instrumentation
-import kamon.trace.{ TraceContextAware, TraceRecorder }
+import kamon.trace.{ TraceContext, TraceContextAware }
import org.aspectj.lang.ProceedingJoinPoint
import org.aspectj.lang.annotation._
@@ -40,7 +40,7 @@ class FutureInstrumentation {
@Around("futureRelatedRunnableExecution(runnable)")
def aroundExecution(pjp: ProceedingJoinPoint, runnable: TraceContextAware): Any = {
- TraceRecorder.withInlineTraceContextReplacement(runnable.traceContext) {
+ TraceContext.withContext(runnable.traceContext) {
pjp.proceed()
}
}
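
The relocated aspect keeps the behaviour it had under TraceRecorder: the Runnable mixes in TraceContextAware, capturing the TraceContext current at creation time, and the around-advice reinstates it while the body runs. A conceptual sketch of that hand-off (not the woven code itself):

    import kamon.trace.{ TraceContext, TraceContextAware }

    object FuturePropagationSketch {
      // What the around-advice effectively does for each future-related Runnable.
      def runWithCapturedContext(runnable: Runnable with TraceContextAware): Unit =
        TraceContext.withContext(runnable.traceContext) {
          runnable.run()
        }
    }
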
diff --git a/kamon-core/src/main/scala/kamon/instrumentation/scalaz/FutureInstrumentation.scala b/kamon-scala/src/main/scala/kamon/scalaz/instrumentation/FutureInstrumentation.scala
index 65caaa8f..b5aadbd3 100644
--- a/kamon-core/src/main/scala/kamon/instrumentation/scalaz/FutureInstrumentation.scala
+++ b/kamon-scala/src/main/scala/kamon/scalaz/instrumentation/FutureInstrumentation.scala
@@ -14,9 +14,9 @@
* =========================================================================================
*/
-package kamon.instrumentation.scalaz
+package kamon.scalaz.instrumentation
-import kamon.trace.{ TraceContextAware, TraceRecorder }
+import kamon.trace.{ TraceContext, TraceContextAware }
import org.aspectj.lang.ProceedingJoinPoint
import org.aspectj.lang.annotation._
@@ -40,7 +40,7 @@ class FutureInstrumentation {
@Around("futureRelatedCallableExecution(callable)")
def aroundExecution(pjp: ProceedingJoinPoint, callable: TraceContextAware): Any =
- TraceRecorder.withInlineTraceContextReplacement(callable.traceContext) {
+ TraceContext.withContext(callable.traceContext) {
pjp.proceed()
}
diff --git a/kamon-core/src/test/scala/kamon/instrumentation/scala/FutureInstrumentationSpec.scala b/kamon-scala/src/test/scala/kamon/scala/instrumentation/FutureInstrumentationSpec.scala
index 31afd3ff..d70e88ae 100644
--- a/kamon-core/src/test/scala/kamon/instrumentation/scala/FutureInstrumentationSpec.scala
+++ b/kamon-scala/src/test/scala/kamon/scala/instrumentation/FutureInstrumentationSpec.scala
@@ -13,18 +13,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================== */
-package kamon.instrumentation.scala
+package kamon.scala.instrumentation
-import akka.actor.ActorSystem
-import akka.testkit.TestKit
-import kamon.trace.TraceRecorder
+import kamon.testkit.BaseKamonSpec
+import kamon.trace.TraceContext
+import org.scalatest.OptionValues
import org.scalatest.concurrent.{ PatienceConfiguration, ScalaFutures }
-import org.scalatest.{ Matchers, OptionValues, WordSpecLike }
import scala.concurrent.Future
-class FutureInstrumentationSpec extends TestKit(ActorSystem("future-instrumentation-spec")) with WordSpecLike with Matchers
- with ScalaFutures with PatienceConfiguration with OptionValues {
+class FutureInstrumentationSpec extends BaseKamonSpec("future-instrumentation-spec") with ScalaFutures
+ with PatienceConfiguration with OptionValues {
implicit val execContext = system.dispatcher
@@ -32,10 +31,10 @@ class FutureInstrumentationSpec extends TestKit(ActorSystem("future-instrumentat
"capture the TraceContext available when created" which {
"must be available when executing the future's body" in {
- val (future, testTraceContext) = TraceRecorder.withNewTraceContext("future-body") {
- val future = Future(TraceRecorder.currentContext)
+ val (future, testTraceContext) = TraceContext.withContext(newContext("future-body")) {
+ val future = Future(TraceContext.currentContext)
- (future, TraceRecorder.currentContext)
+ (future, TraceContext.currentContext)
}
whenReady(future)(ctxInFuture ⇒
@@ -44,14 +43,14 @@ class FutureInstrumentationSpec extends TestKit(ActorSystem("future-instrumentat
"must be available when executing callbacks on the future" in {
- val (future, testTraceContext) = TraceRecorder.withNewTraceContext("future-body") {
+ val (future, testTraceContext) = TraceContext.withContext(newContext("future-body")) {
val future = Future("Hello Kamon!")
// The TraceContext is expected to be available during all intermediate processing.
.map(_.length)
.flatMap(len ⇒ Future(len.toString))
- .map(s ⇒ TraceRecorder.currentContext)
+ .map(s ⇒ TraceContext.currentContext)
- (future, TraceRecorder.currentContext)
+ (future, TraceContext.currentContext)
}
whenReady(future)(ctxInFuture ⇒
diff --git a/kamon-core/src/test/scala/kamon/instrumentation/scalaz/FutureInstrumentationSpec.scala b/kamon-scala/src/test/scala/kamon/scalaz/instrumentation/FutureInstrumentationSpec.scala
index 29bf96f8..ba8fa18c 100644
--- a/kamon-core/src/test/scala/kamon/instrumentation/scalaz/FutureInstrumentationSpec.scala
+++ b/kamon-scala/src/test/scala/kamon/scalaz/instrumentation/FutureInstrumentationSpec.scala
@@ -13,18 +13,19 @@
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================== */
-package kamon.instrumentation.scalaz
+package kamon.scalaz.instrumentation
-import akka.actor.ActorSystem
-import akka.testkit.TestKit
-import kamon.trace.TraceRecorder
+import java.util.concurrent.Executors
+
+import kamon.testkit.BaseKamonSpec
+import kamon.trace.TraceContext
+import org.scalatest.OptionValues
import org.scalatest.concurrent.{ PatienceConfiguration, ScalaFutures }
-import org.scalatest.{ Matchers, OptionValues, WordSpecLike }
+
import scalaz.concurrent.Future
-import java.util.concurrent.Executors
-class FutureInstrumentationSpec extends TestKit(ActorSystem("future-instrumentation-spec")) with WordSpecLike with Matchers
- with ScalaFutures with PatienceConfiguration with OptionValues {
+class FutureInstrumentationSpec extends BaseKamonSpec("future-instrumentation-spec") with ScalaFutures
+ with PatienceConfiguration with OptionValues {
implicit val execContext = Executors.newCachedThreadPool()
@@ -32,10 +33,10 @@ class FutureInstrumentationSpec extends TestKit(ActorSystem("future-instrumentat
"capture the TraceContext available when created" which {
"must be available when executing the future's body" in {
- val (future, testTraceContext) = TraceRecorder.withNewTraceContext("future-body") {
- val future = Future(TraceRecorder.currentContext).start
+ val (future, testTraceContext) = TraceContext.withContext(newContext("future-body")) {
+ val future = Future(TraceContext.currentContext).start
- (future, TraceRecorder.currentContext)
+ (future, TraceContext.currentContext)
}
val ctxInFuture = future.run
@@ -44,14 +45,14 @@ class FutureInstrumentationSpec extends TestKit(ActorSystem("future-instrumentat
"must be available when executing callbacks on the future" in {
- val (future, testTraceContext) = TraceRecorder.withNewTraceContext("future-body") {
+ val (future, testTraceContext) = TraceContext.withContext(newContext("future-body")) {
val future = Future("Hello Kamon!")
// The TraceContext is expected to be available during all intermediate processing.
.map(_.length)
.flatMap(len ⇒ Future(len.toString))
- .map(s ⇒ TraceRecorder.currentContext)
+ .map(s ⇒ TraceContext.currentContext)
- (future.start, TraceRecorder.currentContext)
+ (future.start, TraceContext.currentContext)
}
val ctxInFuture = future.run
diff --git a/kamon-spray/src/main/resources/META-INF/aop.xml b/kamon-spray/src/main/resources/META-INF/aop.xml
index 0e5726c6..00e8763a 100644
--- a/kamon-spray/src/main/resources/META-INF/aop.xml
+++ b/kamon-spray/src/main/resources/META-INF/aop.xml
@@ -2,14 +2,16 @@
<aspectj>
<aspects>
+
<!-- Spray Server -->
- <aspect name="spray.can.server.ServerRequestInstrumentation"/>
+ <aspect name="spray.can.server.instrumentation.ServerRequestInstrumentation"/>
<!-- Spray Client -->
<aspect name="spray.can.client.ClientRequestInstrumentation"/>
+
</aspects>
<weaver>
- <include within="spray.can..*"/>
+ <include within="spray..*"/>
</weaver>
</aspectj>
diff --git a/kamon-spray/src/main/resources/reference.conf b/kamon-spray/src/main/resources/reference.conf
index 5c5e9317..c5270ef5 100644
--- a/kamon-spray/src/main/resources/reference.conf
+++ b/kamon-spray/src/main/resources/reference.conf
@@ -4,6 +4,7 @@
kamon {
spray {
+
# Header name used when propagating the `TraceContext.token` value across applications.
trace-token-header-name = "X-Trace-Token"
@@ -23,16 +24,24 @@ kamon {
client {
# Strategy used for automatic trace segment generation when issuing requests with spray-client. The possible values
# are:
- # - pipelining: measures the time during which the user application code is waiting for a spray-client request to
+ # - request-level: measures the time during which the user application code is waiting for a spray-client request to
# complete, by attaching a callback to the Future[HttpResponse] returned by `spray.client.pipelining.sendReceive`.
# If `spray.client.pipelining.sendReceive` is not used, the segment measurement won't be performed.
- # - internal: measures the internal time taken by spray-client to finish a request. Sometimes the user application
+ # - host-level: measures the internal time taken by spray-client to finish a request. Sometimes the user application
# code has a finite future timeout (like when using `spray.client.pipelining.sendReceive`) that doesn't match
# the actual amount of time spray might take internally to resolve a request, counting retries, redirects,
# connection timeouts and so on. If using the host-level strategy, the measured time will include the entire time
# since the request has been received by the corresponding `HttpHostConnector` until a response is sent back
# to the requester.
- segment-collection-strategy = pipelining
+ instrumentation-level = request-level
+ }
+ }
+
+ modules {
+ kamon-spray {
+ auto-start = yes
+ requires-aspectj = yes
+ extension-id = "kamon.spray.Spray"
}
}
} \ No newline at end of file
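
The renamed instrumentation-level setting replaces segment-collection-strategy and keeps the same two options described in the comment above. A tiny sketch of reading it back with the Typesafe Config API (the parsing into ClientInstrumentationLevel happens in SprayExtensionSettings, added later in this diff):

    import com.typesafe.config.ConfigFactory

    object InstrumentationLevelSketch extends App {
      val level = ConfigFactory.load().getString("kamon.spray.client.instrumentation-level")
      // Only the two values documented above are valid.
      require(level == "request-level" || level == "host-level", s"invalid level [$level]")
    }
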
diff --git a/kamon-spray/src/main/scala/kamon/spray/KamonTraceDirectives.scala b/kamon-spray/src/main/scala/kamon/spray/KamonTraceDirectives.scala
index e98b63d9..4eefee95 100644
--- a/kamon-spray/src/main/scala/kamon/spray/KamonTraceDirectives.scala
+++ b/kamon-spray/src/main/scala/kamon/spray/KamonTraceDirectives.scala
@@ -17,11 +17,11 @@ package kamon.spray
import spray.routing.directives.BasicDirectives
import spray.routing._
-import kamon.trace.TraceRecorder
+import kamon.trace.TraceContext
trait KamonTraceDirectives extends BasicDirectives {
def traceName(name: String): Directive0 = mapRequest { req ⇒
- TraceRecorder.rename(name)
+ TraceContext.currentContext.rename(name)
req
}
}
diff --git a/kamon-spray/src/main/scala/kamon/spray/Spray.scala b/kamon-spray/src/main/scala/kamon/spray/SprayExtension.scala
index ab8d6a7d..ab0fe50b 100644
--- a/kamon-spray/src/main/scala/kamon/spray/Spray.scala
+++ b/kamon-spray/src/main/scala/kamon/spray/SprayExtension.scala
@@ -18,47 +18,49 @@ package kamon.spray
import akka.actor.{ ExtendedActorSystem, ExtensionIdProvider, ExtensionId }
import akka.actor
+import akka.event.{ Logging, LoggingAdapter }
import kamon.Kamon
import kamon.http.HttpServerMetrics
-import kamon.metric.Metrics
+import kamon.metric.Entity
import spray.http.HttpHeaders.Host
import spray.http.HttpRequest
object Spray extends ExtensionId[SprayExtension] with ExtensionIdProvider {
def lookup(): ExtensionId[_ <: actor.Extension] = Spray
- def createExtension(system: ExtendedActorSystem): SprayExtension = new SprayExtension(system)
+ def createExtension(system: ExtendedActorSystem): SprayExtension = new SprayExtensionImpl(system)
val SegmentLibraryName = "spray-client"
}
-object ClientSegmentCollectionStrategy {
- sealed trait Strategy
- case object Pipelining extends Strategy
- case object Internal extends Strategy
+trait SprayExtension extends Kamon.Extension {
+ def settings: SprayExtensionSettings
+ def log: LoggingAdapter
+ def httpServerMetrics: HttpServerMetrics
+ def generateTraceName(request: HttpRequest): String
+ def generateRequestLevelApiSegmentName(request: HttpRequest): String
+ def generateHostLevelApiSegmentName(request: HttpRequest): String
}
-class SprayExtension(private val system: ExtendedActorSystem) extends Kamon.Extension {
- private val config = system.settings.config.getConfig("kamon.spray")
+class SprayExtensionImpl(system: ExtendedActorSystem) extends SprayExtension {
+ val settings = SprayExtensionSettings(system)
+ val log = Logging(system, "SprayExtension")
- val includeTraceToken: Boolean = config.getBoolean("automatic-trace-token-propagation")
- val traceTokenHeaderName: String = config.getString("trace-token-header-name")
- val httpServerMetrics = Kamon(Metrics)(system).register(HttpServerMetrics, HttpServerMetrics.Factory).get
- // It's safe to assume that HttpServerMetrics will always exist because there is no particular filter for it.
+ val httpServerMetrics = {
+ val metricsExtension = Kamon.metrics
+ val factory = metricsExtension.instrumentFactory(HttpServerMetrics.category)
+ val entity = Entity("spray-server", HttpServerMetrics.category)
- private val nameGeneratorFQN = config.getString("name-generator")
- private val nameGenerator: SprayNameGenerator = system.dynamicAccess.createInstanceFor[SprayNameGenerator](nameGeneratorFQN, Nil).get // let's bubble up any problems.
+ metricsExtension.register(entity, new HttpServerMetrics(factory)).recorder
+ }
- val clientSegmentCollectionStrategy: ClientSegmentCollectionStrategy.Strategy =
- config.getString("client.segment-collection-strategy") match {
- case "pipelining" ⇒ ClientSegmentCollectionStrategy.Pipelining
- case "internal" ⇒ ClientSegmentCollectionStrategy.Internal
- case other ⇒ throw new IllegalArgumentException(s"Configured segment-collection-strategy [$other] is invalid, " +
- s"only pipelining and internal are valid options.")
- }
+ def generateTraceName(request: HttpRequest): String =
+ settings.nameGenerator.generateTraceName(request)
- def generateTraceName(request: HttpRequest): String = nameGenerator.generateTraceName(request)
- def generateRequestLevelApiSegmentName(request: HttpRequest): String = nameGenerator.generateRequestLevelApiSegmentName(request)
- def generateHostLevelApiSegmentName(request: HttpRequest): String = nameGenerator.generateHostLevelApiSegmentName(request)
+ def generateRequestLevelApiSegmentName(request: HttpRequest): String =
+ settings.nameGenerator.generateRequestLevelApiSegmentName(request)
+
+ def generateHostLevelApiSegmentName(request: HttpRequest): String =
+ settings.nameGenerator.generateHostLevelApiSegmentName(request)
}
trait SprayNameGenerator {
@@ -68,14 +70,19 @@ trait SprayNameGenerator {
}
class DefaultSprayNameGenerator extends SprayNameGenerator {
- def hostFromHeaders(request: HttpRequest): Option[String] = request.header[Host].map(_.host)
def generateRequestLevelApiSegmentName(request: HttpRequest): String = {
val uriAddress = request.uri.authority.host.address
if (uriAddress.equals("")) hostFromHeaders(request).getOrElse("unknown-host") else uriAddress
}
- def generateHostLevelApiSegmentName(request: HttpRequest): String = hostFromHeaders(request).getOrElse("unknown-host")
+ def generateHostLevelApiSegmentName(request: HttpRequest): String =
+ hostFromHeaders(request).getOrElse("unknown-host")
+
+ def generateTraceName(request: HttpRequest): String =
+ request.method.value + ": " + request.uri.path
+
+ private def hostFromHeaders(request: HttpRequest): Option[String] =
+ request.header[Host].map(_.host)
- def generateTraceName(request: HttpRequest): String = request.method.value + ": " + request.uri.path
}
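
The extension now delegates all naming to the SprayNameGenerator configured via kamon.spray.name-generator, which SprayExtensionSettings instantiates reflectively. A hypothetical custom generator, sketched against the three methods the trait exposes in this diff:

    import kamon.spray.SprayNameGenerator
    import spray.http.HttpRequest

    // Hypothetical implementation; set kamon.spray.name-generator to its FQCN.
    class MethodOnlyNameGenerator extends SprayNameGenerator {
      def generateTraceName(request: HttpRequest): String =
        request.method.value // e.g. "GET" instead of "GET: /path"

      def generateRequestLevelApiSegmentName(request: HttpRequest): String =
        request.uri.authority.host.address

      def generateHostLevelApiSegmentName(request: HttpRequest): String =
        request.uri.authority.host.address
    }
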
diff --git a/kamon-spray/src/main/scala/kamon/spray/SprayExtensionSettings.scala b/kamon-spray/src/main/scala/kamon/spray/SprayExtensionSettings.scala
new file mode 100644
index 00000000..44c71eaf
--- /dev/null
+++ b/kamon-spray/src/main/scala/kamon/spray/SprayExtensionSettings.scala
@@ -0,0 +1,35 @@
+package kamon.spray
+
+import akka.actor.ExtendedActorSystem
+
+case class SprayExtensionSettings(
+ includeTraceTokenHeader: Boolean,
+ traceTokenHeaderName: String,
+ nameGenerator: SprayNameGenerator,
+ clientInstrumentationLevel: ClientInstrumentationLevel.Level)
+
+object SprayExtensionSettings {
+ def apply(system: ExtendedActorSystem): SprayExtensionSettings = {
+ val config = system.settings.config.getConfig("kamon.spray")
+
+ val includeTraceTokenHeader: Boolean = config.getBoolean("automatic-trace-token-propagation")
+ val traceTokenHeaderName: String = config.getString("trace-token-header-name")
+
+ val nameGeneratorFQN = config.getString("name-generator")
+ val nameGenerator: SprayNameGenerator = system.dynamicAccess.createInstanceFor[SprayNameGenerator](nameGeneratorFQN, Nil).get // let's bubble up any problems.
+
+ val clientInstrumentationLevel: ClientInstrumentationLevel.Level = config.getString("client.instrumentation-level") match {
+ case "request-level" ⇒ ClientInstrumentationLevel.RequestLevelAPI
+ case "host-level" ⇒ ClientInstrumentationLevel.HostLevelAPI
+ case other ⇒ sys.error(s"Invalid client instrumentation level [$other] found in configuration.")
+ }
+
+ SprayExtensionSettings(includeTraceTokenHeader, traceTokenHeaderName, nameGenerator, clientInstrumentationLevel)
+ }
+}
+
+object ClientInstrumentationLevel {
+ sealed trait Level
+ case object RequestLevelAPI extends Level
+ case object HostLevelAPI extends Level
+}
diff --git a/kamon-spray/src/main/scala/spray/can/client/ClientRequestInstrumentation.scala b/kamon-spray/src/main/scala/kamon/spray/instrumentation/ClientRequestInstrumentation.scala
index 813915c4..d1e9036d 100644
--- a/kamon-spray/src/main/scala/spray/can/client/ClientRequestInstrumentation.scala
+++ b/kamon-spray/src/main/scala/kamon/spray/instrumentation/ClientRequestInstrumentation.scala
@@ -16,13 +16,13 @@
package spray.can.client
+import kamon.Kamon
import org.aspectj.lang.annotation._
import org.aspectj.lang.ProceedingJoinPoint
import spray.http._
import spray.http.HttpHeaders.RawHeader
import kamon.trace._
-import kamon.Kamon
-import kamon.spray.{ ClientSegmentCollectionStrategy, Spray }
+import kamon.spray.{ ClientInstrumentationLevel, Spray }
import akka.actor.ActorRef
import scala.concurrent.{ Future, ExecutionContext }
import akka.util.Timeout
@@ -47,10 +47,10 @@ class ClientRequestInstrumentation {
// This read of requestContext.traceContext takes care of initializing the aspect in a timely manner.
requestContext.traceContext
- TraceRecorder.withTraceContextAndSystem { (ctx, system) ⇒
- val sprayExtension = Kamon(Spray)(system)
+ TraceContext.map { ctx ⇒
+ val sprayExtension = Kamon.extension(Spray)
- if (sprayExtension.clientSegmentCollectionStrategy == ClientSegmentCollectionStrategy.Internal) {
+ if (sprayExtension.settings.clientInstrumentationLevel == ClientInstrumentationLevel.HostLevelAPI) {
if (requestContext.segment.isEmpty) {
val clientRequestName = sprayExtension.generateHostLevelApiSegmentName(request)
val segment = ctx.startSegment(clientRequestName, SegmentCategory.HttpClient, Spray.SegmentLibraryName)
@@ -74,7 +74,7 @@ class ClientRequestInstrumentation {
@Around("copyingRequestContext(old)")
def aroundCopyingRequestContext(pjp: ProceedingJoinPoint, old: TraceContextAware): Any = {
- TraceRecorder.withInlineTraceContextReplacement(old.traceContext) {
+ TraceContext.withContext(old.traceContext) {
pjp.proceed()
}
}
@@ -85,7 +85,7 @@ class ClientRequestInstrumentation {
@Around("dispatchToCommander(requestContext, message)")
def aroundDispatchToCommander(pjp: ProceedingJoinPoint, requestContext: TraceContextAware, message: Any): Any = {
if (requestContext.traceContext.nonEmpty) {
- TraceRecorder.withInlineTraceContextReplacement(requestContext.traceContext) {
+ TraceContext.withContext(requestContext.traceContext) {
if (message.isInstanceOf[HttpMessageEnd])
requestContext.asInstanceOf[SegmentAware].segment.finish()
@@ -112,10 +112,10 @@ class ClientRequestInstrumentation {
val originalSendReceive = pjp.proceed().asInstanceOf[HttpRequest ⇒ Future[HttpResponse]]
(request: HttpRequest) ⇒ {
- TraceRecorder.withTraceContextAndSystem { (ctx, system) ⇒
- val sprayExtension = Kamon(Spray)(system)
+ TraceContext.map { ctx ⇒
+ val sprayExtension = Kamon.extension(Spray)
val segment =
- if (sprayExtension.clientSegmentCollectionStrategy == ClientSegmentCollectionStrategy.Pipelining)
+ if (sprayExtension.settings.clientInstrumentationLevel == ClientInstrumentationLevel.RequestLevelAPI)
ctx.startSegment(sprayExtension.generateRequestLevelApiSegmentName(request), SegmentCategory.HttpClient, Spray.SegmentLibraryName)
else
EmptyTraceContext.EmptySegment
@@ -139,10 +139,10 @@ class ClientRequestInstrumentation {
@Around("includingDefaultHeadersAtHttpHostConnector(request, defaultHeaders)")
def aroundIncludingDefaultHeadersAtHttpHostConnector(pjp: ProceedingJoinPoint, request: HttpMessage, defaultHeaders: List[HttpHeader]): Any = {
- val modifiedHeaders = TraceRecorder.withTraceContextAndSystem { (ctx, system) ⇒
- val sprayExtension = Kamon(Spray)(system)
- if (sprayExtension.includeTraceToken)
- RawHeader(sprayExtension.traceTokenHeaderName, ctx.token) :: defaultHeaders
+ val modifiedHeaders = TraceContext.map { ctx ⇒
+ val sprayExtension = Kamon.extension(Spray)
+ if (sprayExtension.settings.includeTraceTokenHeader)
+ RawHeader(sprayExtension.settings.traceTokenHeaderName, ctx.token) :: defaultHeaders
else
defaultHeaders
@@ -150,4 +150,4 @@ class ClientRequestInstrumentation {
pjp.proceed(Array[AnyRef](request, modifiedHeaders))
}
-}
+} \ No newline at end of file
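
Both client advices above share the same shape: TraceContext.map runs the block only when a non-empty context is active, and EmptyTraceContext.EmptySegment keeps the later finish() call safe. A sketch of that pattern extracted into a helper, assuming map returns an Option as its use above suggests:

    import kamon.spray.Spray
    import kamon.trace.{ EmptyTraceContext, SegmentCategory, TraceContext }

    object SegmentSketch {
      // Times a block as an HttpClient segment when a TraceContext is present.
      def timedSegment[T](name: String)(block: ⇒ T): T = {
        val segment = TraceContext.map { ctx ⇒
          ctx.startSegment(name, SegmentCategory.HttpClient, Spray.SegmentLibraryName)
        } getOrElse EmptyTraceContext.EmptySegment

        try block finally segment.finish()
      }
    }
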
diff --git a/kamon-spray/src/main/scala/spray/can/server/ServerRequestInstrumentation.scala b/kamon-spray/src/main/scala/kamon/spray/instrumentation/ServerRequestInstrumentation.scala
index 93a9cf55..bf20d167 100644
--- a/kamon-spray/src/main/scala/spray/can/server/ServerRequestInstrumentation.scala
+++ b/kamon-spray/src/main/scala/kamon/spray/instrumentation/ServerRequestInstrumentation.scala
@@ -13,13 +13,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================== */
-package spray.can.server
+package spray.can.server.instrumentation
+import kamon.trace.TraceLocal.{ HttpContext, HttpContextKey }
import org.aspectj.lang.annotation._
import kamon.trace._
import akka.actor.ActorSystem
+import spray.can.server.OpenRequest
import spray.http.{ HttpResponse, HttpMessagePartWrapper, HttpRequest }
-import akka.event.Logging.Warning
import kamon.Kamon
import kamon.spray.{ SprayExtension, Spray }
import org.aspectj.lang.ProceedingJoinPoint
@@ -28,6 +29,8 @@ import spray.http.HttpHeaders.RawHeader
@Aspect
class ServerRequestInstrumentation {
+ import ServerRequestInstrumentation._
+
@DeclareMixin("spray.can.server.OpenRequestComponent.DefaultOpenRequest")
def mixinContextAwareToOpenRequest: TraceContextAware = TraceContextAware.default
@@ -36,15 +39,16 @@ class ServerRequestInstrumentation {
@After("openRequestInit(openRequest, request)")
def afterInit(openRequest: TraceContextAware, request: HttpRequest): Unit = {
- val system: ActorSystem = openRequest.asInstanceOf[OpenRequest].context.actorContext.system
- val sprayExtension = Kamon(Spray)(system)
+ import Kamon.tracer
+ val sprayExtension = Kamon(Spray)
val defaultTraceName = sprayExtension.generateTraceName(request)
- val token = if (sprayExtension.includeTraceToken) {
- request.headers.find(_.name == sprayExtension.traceTokenHeaderName).map(_.value)
+ val token = if (sprayExtension.settings.includeTraceTokenHeader) {
+ request.headers.find(_.name == sprayExtension.settings.traceTokenHeaderName).map(_.value)
} else None
- TraceRecorder.start(defaultTraceName, token)(system)
+ val newContext = token.map(customToken ⇒ tracer.newContext(defaultTraceName, customToken)) getOrElse (tracer.newContext(defaultTraceName))
+ TraceContext.setCurrentContext(newContext)
// Necessary to force initialization of traceContext when initiating the request.
openRequest.traceContext
@@ -55,7 +59,7 @@ class ServerRequestInstrumentation {
@After("openNewRequest()")
def afterOpenNewRequest(): Unit = {
- TraceRecorder.clearContext
+ TraceContext.clearCurrentContext
}
@Pointcut("execution(* spray.can.server.OpenRequestComponent$DefaultOpenRequest.handleResponseEndAndReturnNextOpenRequest(..)) && target(openRequest) && args(response)")
@@ -63,40 +67,43 @@ class ServerRequestInstrumentation {
@Around("openRequestCreation(openRequest, response)")
def afterFinishingRequest(pjp: ProceedingJoinPoint, openRequest: TraceContextAware, response: HttpMessagePartWrapper): Any = {
- val incomingContext = TraceRecorder.currentContext
+ val incomingContext = TraceContext.currentContext
val storedContext = openRequest.traceContext
// The stored context is always a DefaultTraceContext if the instrumentation is running
- val system = storedContext.asInstanceOf[DefaultTraceContext].system
-
- verifyTraceContextConsistency(incomingContext, storedContext, system)
+ verifyTraceContextConsistency(incomingContext, storedContext)
if (incomingContext.isEmpty)
pjp.proceed()
else {
- val sprayExtension = Kamon(Spray)(system)
+ val sprayExtension = Kamon(Spray)
- val proceedResult = if (sprayExtension.includeTraceToken) {
- val responseWithHeader = includeTraceTokenIfPossible(response, sprayExtension.traceTokenHeaderName, incomingContext.token)
+ val proceedResult = if (sprayExtension.settings.includeTraceTokenHeader) {
+ val responseWithHeader = includeTraceTokenIfPossible(response, sprayExtension.settings.traceTokenHeaderName, incomingContext.token)
pjp.proceed(Array(openRequest, responseWithHeader))
} else pjp.proceed
- TraceRecorder.finish()
+ TraceContext.currentContext.finish()
+
recordHttpServerMetrics(response, incomingContext.name, sprayExtension)
+
+ // Store useful data in TraceLocal to help diagnose errors.
+ storeDiagnosticData(openRequest)
+
proceedResult
}
}
- def verifyTraceContextConsistency(incomingTraceContext: TraceContext, storedTraceContext: TraceContext, system: ActorSystem): Unit = {
- def publishWarning(text: String, system: ActorSystem): Unit =
- system.eventStream.publish(Warning("ServerRequestInstrumentation", classOf[ServerRequestInstrumentation], text))
+ def verifyTraceContextConsistency(incomingTraceContext: TraceContext, storedTraceContext: TraceContext): Unit = {
+ def publishWarning(text: String): Unit =
+ Kamon(Spray).log.warning(text)
if (incomingTraceContext.nonEmpty) {
if (incomingTraceContext.token != storedTraceContext.token)
- publishWarning(s"Different trace token found when trying to close a trace, original: [${storedTraceContext.token}] - incoming: [${incomingTraceContext.token}]", system)
+ publishWarning(s"Different trace token found when trying to close a trace, original: [${storedTraceContext.token}] - incoming: [${incomingTraceContext.token}]")
} else
- publishWarning(s"EmptyTraceContext present while closing the trace with token [${storedTraceContext.token}]", system)
+ publishWarning(s"EmptyTraceContext present while closing the trace with token [${storedTraceContext.token}]")
}
def recordHttpServerMetrics(response: HttpMessagePartWrapper, traceName: String, sprayExtension: SprayExtension): Unit =
@@ -110,4 +117,19 @@ class ServerRequestInstrumentation {
case response: HttpResponse ⇒ response.withHeaders(response.headers ::: RawHeader(traceTokenHeaderName, token) :: Nil)
case other ⇒ other
}
+
+ def storeDiagnosticData(currentContext: TraceContextAware): Unit = {
+ val request = currentContext.asInstanceOf[OpenRequest].request
+ val headers = request.headers.map(header ⇒ header.name -> header.value).toMap
+ val agent = headers.getOrElse(UserAgent, Unknown)
+ val forwarded = headers.getOrElse(XForwardedFor, Unknown)
+
+ TraceLocal.store(HttpContextKey)(HttpContext(agent, request.uri.toRelative.toString(), forwarded))
+ }
+}
+
+object ServerRequestInstrumentation {
+ val UserAgent = "User-Agent"
+ val XForwardedFor = "X-Forwarded-For"
+ val Unknown = "unknown"
}
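
storeDiagnosticData above writes an HttpContext into TraceLocal so that later stages (or the RequestInstrumentationSpec test earlier in this diff) can read it back. A compact sketch of both halves of that flow:

    import kamon.trace.TraceLocal
    import kamon.trace.TraceLocal.{ HttpContext, HttpContextKey }

    object HttpContextSketch {
      // Store side, as the server instrumentation does per request...
      def store(): Unit =
        TraceLocal.store(HttpContextKey)(HttpContext("Fake-Agent", "/default", "unknown"))

      // ...and the read side used by the Play spec above.
      def agent(): Option[String] =
        TraceLocal.retrieve(HttpContextKey).map(_.agent)
    }
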
diff --git a/kamon-spray/src/test/resources/application.conf b/kamon-spray/src/test/resources/application.conf
index 4a9b2c67..8b137891 100644
--- a/kamon-spray/src/test/resources/application.conf
+++ b/kamon-spray/src/test/resources/application.conf
@@ -1,26 +1 @@
-kamon {
- metrics {
- tick-interval = 1 second
- filters = [
- {
- actor {
- includes = []
- excludes = [ "system/*", "user/IO-*" ]
- }
- },
- {
- trace {
- includes = [ "*" ]
- excludes = []
- }
- },
- {
- dispatcher {
- includes = [ "default-dispatcher" ]
- excludes = []
- }
- }
- ]
- }
-} \ No newline at end of file
diff --git a/kamon-spray/src/test/scala/kamon/spray/ClientRequestInstrumentationSpec.scala b/kamon-spray/src/test/scala/kamon/spray/ClientRequestInstrumentationSpec.scala
index b90b0f3b..a99bbdcc 100644
--- a/kamon-spray/src/test/scala/kamon/spray/ClientRequestInstrumentationSpec.scala
+++ b/kamon-spray/src/test/scala/kamon/spray/ClientRequestInstrumentationSpec.scala
@@ -16,50 +16,36 @@
package kamon.spray
-import akka.testkit.{ TestKitBase, TestProbe }
-import akka.actor.ActorSystem
+import akka.testkit.TestProbe
+import kamon.testkit.BaseKamonSpec
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.time.{ Millis, Seconds, Span }
-import org.scalatest.{ Matchers, WordSpecLike }
import spray.httpx.RequestBuilding
import spray.http.{ HttpResponse, HttpRequest }
-import kamon.trace.{ SegmentCategory, SegmentMetricIdentity, TraceRecorder }
+import kamon.trace.{ TraceContext, SegmentCategory }
import com.typesafe.config.ConfigFactory
import spray.can.Http
import spray.http.HttpHeaders.RawHeader
import kamon.Kamon
-import kamon.metric.{ TraceMetrics, Metrics }
+import kamon.metric.TraceMetricsSpec
import spray.client.pipelining.sendReceive
-import kamon.metric.Subscriptions.TickMetricSnapshot
import scala.concurrent.duration._
-import kamon.metric.TraceMetrics.TraceMetricsSnapshot
-
-class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike with Matchers with ScalaFutures with RequestBuilding with TestServer {
- implicit lazy val system: ActorSystem = ActorSystem("client-request-instrumentation-spec", ConfigFactory.parseString(
- """
- |akka {
- | loglevel = ERROR
- |}
- |
- |kamon {
- | spray {
- | name-generator = kamon.spray.TestSprayNameGenerator
- | }
- |
- | metrics {
- | tick-interval = 1 hour
- |
- | filters = [
- | {
- | trace {
- | includes = [ "*" ]
- | excludes = []
- | }
- | }
- | ]
- | }
- |}
- """.stripMargin))
+
+class ClientRequestInstrumentationSpec extends BaseKamonSpec("client-request-instrumentation-spec") with ScalaFutures
+ with RequestBuilding with TestServer {
+
+ import TraceMetricsSpec.SegmentSyntax
+
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon {
+ | metric.tick-interval = 1 hour
+ | spray.name-generator = kamon.spray.TestSprayNameGenerator
+ |}
+ |
+ |akka.loggers = ["akka.event.slf4j.Slf4jLogger"]
+ """.stripMargin)
implicit def ec = system.dispatcher
implicit val defaultPatience = PatienceConfig(timeout = Span(10, Seconds), interval = Span(5, Millis))
@@ -71,12 +57,12 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
val (_, server, bound) = buildSHostConnectorAndServer
// Initiate a request within the context of a trace
- val (testContext, responseFuture) = TraceRecorder.withNewTraceContext("include-trace-token-header-at-request-level-api") {
+ val (testContext, responseFuture) = TraceContext.withContext(newContext("include-trace-token-header-at-request-level-api")) {
val rF = sendReceive(system, ec) {
Get(s"http://${bound.localAddress.getHostName}:${bound.localAddress.getPort}/dummy-path")
}
- (TraceRecorder.currentContext, rF)
+ (TraceContext.currentContext, rF)
}
// Accept the connection at the server side
@@ -85,7 +71,7 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
// Receive the request and reply back
val request = server.expectMsgType[HttpRequest]
- request.headers should contain(RawHeader(Kamon(Spray).traceTokenHeaderName, testContext.token))
+ request.headers should contain(traceTokenHeader(testContext.token))
// Finish the request cycle, just to avoid error messages on the logs.
server.reply(HttpResponse(entity = "ok"))
@@ -98,12 +84,12 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
val (_, server, bound) = buildSHostConnectorAndServer
// Initiate a request within the context of a trace
- val (testContext, responseFuture) = TraceRecorder.withNewTraceContext("do-not-include-trace-token-header-at-request-level-api") {
+ val (testContext, responseFuture) = TraceContext.withContext(newContext("do-not-include-trace-token-header-at-request-level-api")) {
val rF = sendReceive(system, ec) {
Get(s"http://${bound.localAddress.getHostName}:${bound.localAddress.getPort}/dummy-path")
}
- (TraceRecorder.currentContext, rF)
+ (TraceContext.currentContext, rF)
}
// Accept the connection at the server side
@@ -112,7 +98,7 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
// Receive the request and reply back
val request = server.expectMsgType[HttpRequest]
- request.headers should not contain (RawHeader(Kamon(Spray).traceTokenHeaderName, testContext.token))
+ request.headers should not contain (traceTokenHeader(testContext.token))
// Finish the request cycle, just to avoid error messages on the logs.
server.reply(HttpResponse(entity = "ok"))
@@ -128,12 +114,12 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
val (_, _, bound) = buildSHostConnectorAndServer
// Initiate a request within the context of a trace
- val (testContext, responseFuture) = TraceRecorder.withNewTraceContext("assign-name-to-segment-with-request-level-api") {
+ val (testContext, responseFuture) = TraceContext.withContext(newContext("assign-name-to-segment-with-request-level-api")) {
val rF = sendReceive(transport.ref)(ec, 10.seconds) {
Get(s"http://${bound.localAddress.getHostName}:${bound.localAddress.getPort}/request-level-api-segment")
}
- (TraceRecorder.currentContext, rF)
+ (TraceContext.currentContext, rF)
}
// Receive the request and reply back
@@ -142,10 +128,10 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
responseFuture.futureValue.entity.asString should be("ok")
testContext.finish()
- val traceMetricsSnapshot = takeSnapshotOf("assign-name-to-segment-with-request-level-api")
- traceMetricsSnapshot.elapsedTime.numberOfMeasurements should be(1)
- traceMetricsSnapshot.segments(SegmentMetricIdentity("request-level /request-level-api-segment",
- SegmentCategory.HttpClient, Spray.SegmentLibraryName)).numberOfMeasurements should be(1)
+ val traceMetricsSnapshot = takeSnapshotOf("assign-name-to-segment-with-request-level-api", "trace")
+ traceMetricsSnapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
+ traceMetricsSnapshot.segment("request-level /request-level-api-segment", SegmentCategory.HttpClient, Spray.SegmentLibraryName)
+ .numberOfMeasurements should be(1)
}
"rename a request level api segment once it reaches the relevant host connector" in {
@@ -155,12 +141,12 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
val (_, server, bound) = buildSHostConnectorAndServer
// Initiate a request within the context of a trace
- val (testContext, responseFuture) = TraceRecorder.withNewTraceContext("rename-segment-with-request-level-api") {
+ val (testContext, responseFuture) = TraceContext.withContext(newContext("rename-segment-with-request-level-api")) {
val rF = sendReceive(system, ec) {
Get(s"http://${bound.localAddress.getHostName}:${bound.localAddress.getPort}/request-level-api-segment")
}
- (TraceRecorder.currentContext, rF)
+ (TraceContext.currentContext, rF)
}
// Accept the connection at the server side
@@ -173,10 +159,10 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
responseFuture.futureValue.entity.asString should be("ok")
testContext.finish()
- val traceMetricsSnapshot = takeSnapshotOf("rename-segment-with-request-level-api")
- traceMetricsSnapshot.elapsedTime.numberOfMeasurements should be(1)
- traceMetricsSnapshot.segments(SegmentMetricIdentity("host-level /request-level-api-segment",
- SegmentCategory.HttpClient, Spray.SegmentLibraryName)).numberOfMeasurements should be(1)
+ val traceMetricsSnapshot = takeSnapshotOf("rename-segment-with-request-level-api", "trace")
+ traceMetricsSnapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
+ traceMetricsSnapshot.segment("host-level /request-level-api-segment", SegmentCategory.HttpClient, Spray.SegmentLibraryName)
+ .numberOfMeasurements should be(1)
}
}
@@ -189,9 +175,9 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
val client = TestProbe()
// Initiate a request within the context of a trace
- val testContext = TraceRecorder.withNewTraceContext("include-trace-token-header-on-http-client-request") {
+ val testContext = TraceContext.withContext(newContext("include-trace-token-header-on-http-client-request")) {
client.send(hostConnector, Get("/dummy-path"))
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
// Accept the connection at the server side
@@ -200,7 +186,7 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
// Receive the request and reply back
val request = server.expectMsgType[HttpRequest]
- request.headers should contain(RawHeader(Kamon(Spray).traceTokenHeaderName, testContext.token))
+ request.headers should contain(traceTokenHeader(testContext.token))
// Finish the request cycle, just to avoid error messages on the logs.
server.reply(HttpResponse(entity = "ok"))
@@ -216,9 +202,9 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
val client = TestProbe()
// Initiate a request within the context of a trace
- val testContext = TraceRecorder.withNewTraceContext("not-include-trace-token-header-on-http-client-request") {
+ val testContext = TraceContext.withContext(newContext("not-include-trace-token-header-on-http-client-request")) {
client.send(hostConnector, Get("/dummy-path"))
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
// Accept the connection at the server side
@@ -227,7 +213,7 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
// Receive the request and reply back
val request = server.expectMsgType[HttpRequest]
- request.headers should not contain (RawHeader(Kamon(Spray).traceTokenHeaderName, testContext.token))
+ request.headers should not contain (traceTokenHeader(testContext.token))
// Finish the request cycle, just to avoid error messages on the logs.
server.reply(HttpResponse(entity = "ok"))
@@ -243,9 +229,9 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
val client = TestProbe()
// Initiate a request within the context of a trace
- val testContext = TraceRecorder.withNewTraceContext("create-segment-with-host-level-api") {
+ val testContext = TraceContext.withContext(newContext("create-segment-with-host-level-api")) {
client.send(hostConnector, Get("/host-level-api-segment"))
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
// Accept the connection at the server side
@@ -254,52 +240,39 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
// Receive the request and reply back
val request = server.expectMsgType[HttpRequest]
- request.headers should not contain (RawHeader(Kamon(Spray).traceTokenHeaderName, testContext.token))
+ request.headers should not contain (traceTokenHeader(testContext.token))
// Finish the request cycle, just to avoid error messages on the logs.
server.reply(HttpResponse(entity = "ok"))
client.expectMsgType[HttpResponse]
testContext.finish()
- val traceMetricsSnapshot = takeSnapshotOf("create-segment-with-host-level-api")
- traceMetricsSnapshot.elapsedTime.numberOfMeasurements should be(1)
- traceMetricsSnapshot.segments(SegmentMetricIdentity("host-level /host-level-api-segment",
- SegmentCategory.HttpClient, Spray.SegmentLibraryName)).numberOfMeasurements should be(1)
+ val traceMetricsSnapshot = takeSnapshotOf("create-segment-with-host-level-api", "trace")
+ traceMetricsSnapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
+ traceMetricsSnapshot.segment("host-level /host-level-api-segment", SegmentCategory.HttpClient, Spray.SegmentLibraryName)
+ .numberOfMeasurements should be(1)
}
}
}
- def expectTraceMetrics(traceName: String, listener: TestProbe, timeout: FiniteDuration): TraceMetricsSnapshot = {
- val tickSnapshot = within(timeout) {
- listener.expectMsgType[TickMetricSnapshot]
- }
-
- val metricsOption = tickSnapshot.metrics.get(TraceMetrics(traceName))
- metricsOption should not be empty
- metricsOption.get.asInstanceOf[TraceMetricsSnapshot]
- }
-
- def takeSnapshotOf(traceName: String): TraceMetricsSnapshot = {
- val recorder = Kamon(Metrics).register(TraceMetrics(traceName), TraceMetrics.Factory)
- val collectionContext = Kamon(Metrics).buildDefaultCollectionContext
- recorder.get.collect(collectionContext)
- }
+ def traceTokenHeader(token: String): RawHeader =
+ RawHeader(Kamon(Spray).settings.traceTokenHeaderName, token)
- def enableInternalSegmentCollectionStrategy(): Unit = setSegmentCollectionStrategy(ClientSegmentCollectionStrategy.Internal)
- def enablePipeliningSegmentCollectionStrategy(): Unit = setSegmentCollectionStrategy(ClientSegmentCollectionStrategy.Pipelining)
+ def enableInternalSegmentCollectionStrategy(): Unit = setSegmentCollectionStrategy(ClientInstrumentationLevel.HostLevelAPI)
+ def enablePipeliningSegmentCollectionStrategy(): Unit = setSegmentCollectionStrategy(ClientInstrumentationLevel.RequestLevelAPI)
def enableAutomaticTraceTokenPropagation(): Unit = setIncludeTraceToken(true)
def disableAutomaticTraceTokenPropagation(): Unit = setIncludeTraceToken(false)
- def setSegmentCollectionStrategy(strategy: ClientSegmentCollectionStrategy.Strategy): Unit = {
- val target = Kamon(Spray)(system)
- val field = target.getClass.getDeclaredField("clientSegmentCollectionStrategy")
+ def setSegmentCollectionStrategy(strategy: ClientInstrumentationLevel.Level): Unit = {
+ val target = Kamon(Spray).settings
+ val field = target.getClass.getDeclaredField("clientInstrumentationLevel")
field.setAccessible(true)
field.set(target, strategy)
}
def setIncludeTraceToken(include: Boolean): Unit = {
- val target = Kamon(Spray)(system)
- val field = target.getClass.getDeclaredField("includeTraceToken")
+ val target = Kamon(Spray).settings
+ val field = target.getClass.getDeclaredField("includeTraceTokenHeader")
field.setAccessible(true)
field.set(target, include)
}
diff --git a/kamon-spray/src/test/scala/kamon/spray/SprayServerMetricsSpec.scala b/kamon-spray/src/test/scala/kamon/spray/SprayServerMetricsSpec.scala
index c4b370d7..58bb2885 100644
--- a/kamon-spray/src/test/scala/kamon/spray/SprayServerMetricsSpec.scala
+++ b/kamon-spray/src/test/scala/kamon/spray/SprayServerMetricsSpec.scala
@@ -1,46 +1,27 @@
package kamon.spray
-import akka.actor.ActorSystem
-import akka.testkit.{ TestProbe, TestKitBase }
+import akka.testkit.TestProbe
import com.typesafe.config.ConfigFactory
-import kamon.Kamon
-import kamon.http.HttpServerMetrics
-import kamon.metric._
+import kamon.testkit.BaseKamonSpec
import org.scalatest.concurrent.{ PatienceConfiguration, ScalaFutures }
-import org.scalatest.{ Matchers, WordSpecLike }
import spray.http.{ StatusCodes, HttpResponse, HttpRequest }
import spray.httpx.RequestBuilding
-class SprayServerMetricsSpec extends TestKitBase with WordSpecLike with Matchers with RequestBuilding
- with ScalaFutures with PatienceConfiguration with TestServer {
+class SprayServerMetricsSpec extends BaseKamonSpec("spray-server-metrics-spec") with RequestBuilding with ScalaFutures
+ with PatienceConfiguration with TestServer {
- val collectionContext = CollectionContext(100)
-
- implicit lazy val system: ActorSystem = ActorSystem("spray-server-metrics-spec", ConfigFactory.parseString(
- """
- |akka {
- | loglevel = ERROR
- |}
- |
- |kamon {
- | metrics {
- | tick-interval = 1 hour
- |
- | filters = [
- | {
- | trace {
- | includes = [ "*" ]
- | excludes = []
- | }
- | }
- | ]
- | }
- |}
- """.stripMargin))
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon.metric {
+ | tick-interval = 1 hour
+ |}
+ |
+ |akka.loggers = ["akka.event.slf4j.Slf4jLogger"]
+ """.stripMargin)
"the Spray Server metrics instrumentation" should {
- "record trace metrics for requests received" in {
- Kamon(Metrics)(system).register(TraceMetrics("GET: /record-trace-metrics"), TraceMetrics.Factory).get.collect(collectionContext)
+ "record trace metrics for processed requests" in {
val (connection, server) = buildClientConnectionAndServer
val client = TestProbe()
@@ -58,15 +39,17 @@ class SprayServerMetricsSpec extends TestKitBase with WordSpecLike with Matchers
client.expectMsgType[HttpResponse]
}
- val snapshot = Kamon(Metrics)(system).register(TraceMetrics("GET: /record-trace-metrics"), TraceMetrics.Factory).get.collect(collectionContext)
- snapshot.elapsedTime.numberOfMeasurements should be(15)
+ val snapshot = takeSnapshotOf("GET: /record-trace-metrics", "trace")
+ snapshot.histogram("elapsed-time").get.numberOfMeasurements should be(15)
}
- "record http serve metrics for all the requests" in {
- Kamon(Metrics)(system).register(HttpServerMetrics, HttpServerMetrics.Factory).get.collect(collectionContext)
+ "record http server metrics for all the requests" in {
val (connection, server) = buildClientConnectionAndServer
val client = TestProbe()
+ // Take a snapshot to discard any metrics recorded by previous tests.
+ takeSnapshotOf("spray-server", "http-server")
+
for (repetition ← 1 to 10) {
client.send(connection, Get("/record-http-metrics"))
server.expectMsgType[HttpRequest]
@@ -81,11 +64,11 @@ class SprayServerMetricsSpec extends TestKitBase with WordSpecLike with Matchers
client.expectMsgType[HttpResponse]
}
- val snapshot = Kamon(Metrics)(system).register(HttpServerMetrics, HttpServerMetrics.Factory).get.collect(collectionContext)
- snapshot.countsPerTraceAndStatusCode("GET: /record-http-metrics")("200").count should be(10)
- snapshot.countsPerTraceAndStatusCode("GET: /record-http-metrics")("400").count should be(5)
- snapshot.countsPerStatusCode("200").count should be(10)
- snapshot.countsPerStatusCode("400").count should be(5)
+ val snapshot = takeSnapshotOf("spray-server", "http-server")
+ snapshot.counter("GET: /record-http-metrics_200").get.count should be(10)
+ snapshot.counter("GET: /record-http-metrics_400").get.count should be(5)
+ snapshot.counter("200").get.count should be(10)
+ snapshot.counter("400").get.count should be(5)
}
}
}
diff --git a/kamon-spray/src/test/scala/kamon/spray/SprayServerTracingSpec.scala b/kamon-spray/src/test/scala/kamon/spray/SprayServerTracingSpec.scala
index 48253b1d..bfd88ac8 100644
--- a/kamon-spray/src/test/scala/kamon/spray/SprayServerTracingSpec.scala
+++ b/kamon-spray/src/test/scala/kamon/spray/SprayServerTracingSpec.scala
@@ -17,43 +17,15 @@
package kamon.spray
import _root_.spray.httpx.RequestBuilding
-import akka.testkit.{ TestKitBase, TestProbe }
-import akka.actor.ActorSystem
-import org.scalatest.{ Matchers, WordSpecLike }
+import akka.testkit.TestProbe
+import kamon.testkit.BaseKamonSpec
import kamon.Kamon
import org.scalatest.concurrent.{ PatienceConfiguration, ScalaFutures }
import spray.http.HttpHeaders.RawHeader
import spray.http.{ HttpResponse, HttpRequest }
-import kamon.metric.{ TraceMetrics, Metrics }
-import kamon.metric.Subscriptions.TickMetricSnapshot
-import com.typesafe.config.ConfigFactory
-import kamon.metric.TraceMetrics.ElapsedTime
-import kamon.metric.instrument.Histogram
-
-class SprayServerTracingSpec extends TestKitBase with WordSpecLike with Matchers with RequestBuilding
- with ScalaFutures with PatienceConfiguration with TestServer {
-
- implicit lazy val system: ActorSystem = ActorSystem("spray-server-tracing-spec", ConfigFactory.parseString(
- """
- |akka {
- | loglevel = ERROR
- |}
- |
- |kamon {
- | metrics {
- | tick-interval = 2 seconds
- |
- | filters = [
- | {
- | trace {
- | includes = [ "*" ]
- | excludes = []
- | }
- | }
- | ]
- | }
- |}
- """.stripMargin))
+
+class SprayServerTracingSpec extends BaseKamonSpec("spray-server-tracing-spec") with RequestBuilding with ScalaFutures
+ with PatienceConfiguration with TestServer {
"the spray server request tracing instrumentation" should {
"include the trace-token header in responses when the automatic-trace-token-propagation is enabled" in {
@@ -62,12 +34,12 @@ class SprayServerTracingSpec extends TestKitBase with WordSpecLike with Matchers
val (connection, server) = buildClientConnectionAndServer
val client = TestProbe()
- client.send(connection, Get("/").withHeaders(RawHeader(Kamon(Spray).traceTokenHeaderName, "propagation-enabled")))
+ client.send(connection, Get("/").withHeaders(traceTokenHeader("propagation-enabled")))
server.expectMsgType[HttpRequest]
server.reply(HttpResponse(entity = "ok"))
val response = client.expectMsgType[HttpResponse]
- response.headers should contain(RawHeader(Kamon(Spray).traceTokenHeaderName, "propagation-enabled"))
+ response.headers should contain(traceTokenHeader("propagation-enabled"))
}
"reply back with an automatically assigned trace token if none was provided with the request and automatic-trace-token-propagation is enabled" in {
@@ -81,7 +53,7 @@ class SprayServerTracingSpec extends TestKitBase with WordSpecLike with Matchers
server.reply(HttpResponse(entity = "ok"))
val response = client.expectMsgType[HttpResponse]
- response.headers.filter(_.name == Kamon(Spray).traceTokenHeaderName).size should be(1)
+ response.headers.count(_.name == Kamon(Spray).settings.traceTokenHeaderName) should be(1)
}
@@ -91,21 +63,24 @@ class SprayServerTracingSpec extends TestKitBase with WordSpecLike with Matchers
val (connection, server) = buildClientConnectionAndServer
val client = TestProbe()
- client.send(connection, Get("/").withHeaders(RawHeader(Kamon(Spray).traceTokenHeaderName, "propagation-disabled")))
+ client.send(connection, Get("/").withHeaders(traceTokenHeader("propagation-disabled")))
server.expectMsgType[HttpRequest]
server.reply(HttpResponse(entity = "ok"))
val response = client.expectMsgType[HttpResponse]
- response.headers should not contain (RawHeader(Kamon(Spray).traceTokenHeaderName, "propagation-disabled"))
+ response.headers should not contain traceTokenHeader("propagation-disabled")
}
}
+ def traceTokenHeader(token: String): RawHeader =
+ RawHeader(Kamon(Spray).settings.traceTokenHeaderName, token)
+
def enableAutomaticTraceTokenPropagation(): Unit = setIncludeTraceToken(true)
def disableAutomaticTraceTokenPropagation(): Unit = setIncludeTraceToken(false)
def setIncludeTraceToken(include: Boolean): Unit = {
- val target = Kamon(Spray)(system)
- val field = target.getClass.getDeclaredField("includeTraceToken")
+ val target = Kamon(Spray).settings
+ val field = target.getClass.getDeclaredField("includeTraceTokenHeader")
field.setAccessible(true)
field.set(target, include)
}
diff --git a/kamon-statsd/src/main/resources/reference.conf b/kamon-statsd/src/main/resources/reference.conf
index a10ac735..f26ce98b 100644
--- a/kamon-statsd/src/main/resources/reference.conf
+++ b/kamon-statsd/src/main/resources/reference.conf
@@ -12,18 +12,19 @@ kamon {
# Interval between metrics data flushes to StatsD. Its value must be equal to or greater than the
# kamon.metrics.tick-interval setting.
- flush-interval = 1 second
+ flush-interval = 10 seconds
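+ # For example, with kamon.metrics.tick-interval = 5 seconds, any flush-interval of 5 seconds or more is valid.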
# Max packet size for UDP metrics data sent to StatsD.
max-packet-size = 1024 bytes
# Subscription patterns used to select which metrics will be pushed to StatsD. Note that metrics
# collection for the desired entities must first be activated under the kamon.metrics.filters settings.
- includes {
- actor = [ "*" ]
- trace = [ "*" ]
- dispatcher = [ "*" ]
- router = [ "*" ]
+ subscriptions {
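+ # Patterns are globs; "**" also matches across "/" separators, so it selects every entity in the category.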
+ trace = [ "**" ]
+ actor = [ "**" ]
+ dispatcher = [ "**" ]
+ user-metric = [ "**" ]
+ system-metric = [ "**" ]
}
# Enable system metrics
@@ -61,4 +62,12 @@ kamon {
metric-name-normalization-strategy = normalize
}
}
+
+ modules {
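+ # Module descriptor that lets kamon-core auto-start this extension; no AspectJ weaving is required.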
+ kamon-statsd {
+ auto-start = yes
+ requires-aspectj = no
+ extension-id = "kamon.statsd.StatsD"
+ }
+ }
}
\ No newline at end of file
diff --git a/kamon-statsd/src/main/scala/kamon/statsd/SimpleMetricKeyGenerator.scala b/kamon-statsd/src/main/scala/kamon/statsd/SimpleMetricKeyGenerator.scala
index 28354423..0fce855c 100644
--- a/kamon-statsd/src/main/scala/kamon/statsd/SimpleMetricKeyGenerator.scala
+++ b/kamon-statsd/src/main/scala/kamon/statsd/SimpleMetricKeyGenerator.scala
@@ -3,11 +3,10 @@ package kamon.statsd
import java.lang.management.ManagementFactory
import com.typesafe.config.Config
-import kamon.metric.UserMetrics.UserMetricGroup
-import kamon.metric.{ MetricIdentity, MetricGroupIdentity }
+import kamon.metric.{ MetricKey, Entity }
trait MetricKeyGenerator {
- def generateKey(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity): String
+ def generateKey(entity: Entity, metricKey: MetricKey): String
}
class SimpleMetricKeyGenerator(config: Config) extends MetricKeyGenerator {
@@ -27,16 +26,11 @@ class SimpleMetricKeyGenerator(config: Config) extends MetricKeyGenerator {
if (includeHostname) s"$application.$normalizedHostname"
else application
- def generateKey(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity): String = {
- val normalizedGroupName = normalizer(groupIdentity.name)
- val key = s"${baseName}.${groupIdentity.category.name}.${normalizedGroupName}"
-
- if (isUserMetric(groupIdentity)) key
- else s"${key}.${metricIdentity.name}"
+ def generateKey(entity: Entity, metricKey: MetricKey): String = {
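+ // Keys take the form <application>.<host>.<category>.<entity>.<metric>, e.g. with application "kamon"
+ // and host "kamon-host": "kamon.kamon-host.trace.my-trace.elapsed-time".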
+ val normalizedGroupName = normalizer(entity.name)
+ s"${baseName}.${entity.category}.${normalizedGroupName}.${metricKey.name}"
}
- def isUserMetric(groupIdentity: MetricGroupIdentity): Boolean = groupIdentity.isInstanceOf[UserMetricGroup]
-
def hostName: String = ManagementFactory.getRuntimeMXBean.getName.split('@')(1)
def createNormalizer(strategy: String): Normalizer = strategy match {
diff --git a/kamon-statsd/src/main/scala/kamon/statsd/StatsD.scala b/kamon-statsd/src/main/scala/kamon/statsd/StatsD.scala
index 58fb3658..d406faf6 100644
--- a/kamon-statsd/src/main/scala/kamon/statsd/StatsD.scala
+++ b/kamon-statsd/src/main/scala/kamon/statsd/StatsD.scala
@@ -18,16 +18,14 @@ package kamon.statsd
import akka.actor._
import kamon.Kamon
-import kamon.metric.UserMetrics._
import kamon.metric._
-import kamon.metrics._
+import kamon.util.ConfigTools.Syntax
import scala.concurrent.duration._
-import scala.collection.JavaConverters._
import com.typesafe.config.Config
-import java.lang.management.ManagementFactory
import akka.event.Logging
import java.net.InetSocketAddress
import java.util.concurrent.TimeUnit.MILLISECONDS
+import scala.collection.JavaConverters._
object StatsD extends ExtensionId[StatsDExtension] with ExtensionIdProvider {
override def lookup(): ExtensionId[_ <: Extension] = StatsD
@@ -35,59 +33,31 @@ object StatsD extends ExtensionId[StatsDExtension] with ExtensionIdProvider {
}
class StatsDExtension(system: ExtendedActorSystem) extends Kamon.Extension {
+ implicit val as = system
+
val log = Logging(system, classOf[StatsDExtension])
log.info("Starting the Kamon(StatsD) extension")
private val config = system.settings.config
private val statsDConfig = config.getConfig("kamon.statsd")
+ val metricsExtension = Kamon.metrics
- val tickInterval = config.getMilliseconds("kamon.metrics.tick-interval")
+ val tickInterval = metricsExtension.settings.tickInterval
val statsDHost = new InetSocketAddress(statsDConfig.getString("hostname"), statsDConfig.getInt("port"))
- val flushInterval = statsDConfig.getMilliseconds("flush-interval")
+ val flushInterval = statsDConfig.getFiniteDuration("flush-interval")
val maxPacketSizeInBytes = statsDConfig.getBytes("max-packet-size")
val keyGeneratorFQCN = statsDConfig.getString("metric-key-generator")
val statsDMetricsListener = buildMetricsListener(tickInterval, flushInterval, keyGeneratorFQCN, config)
- // Subscribe to all user metrics
- Kamon(Metrics)(system).subscribe(UserHistograms, "*", statsDMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(UserCounters, "*", statsDMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(UserMinMaxCounters, "*", statsDMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(UserGauges, "*", statsDMetricsListener, permanently = true)
-
- // Subscribe to Actors
- val includedActors = statsDConfig.getStringList("includes.actor").asScala
- for (actorPathPattern ← includedActors) {
- Kamon(Metrics)(system).subscribe(ActorMetrics, actorPathPattern, statsDMetricsListener, permanently = true)
- }
-
- // Subscribe to Routers
- val includedRouters = statsDConfig.getStringList("includes.router").asScala
- for (routerPathPattern ← includedRouters) {
- Kamon(Metrics)(system).subscribe(RouterMetrics, routerPathPattern, statsDMetricsListener, permanently = true)
- }
-
- // Subscribe to Traces
- val includedTraces = statsDConfig.getStringList("includes.trace").asScala
- for (tracePathPattern ← includedTraces) {
- Kamon(Metrics)(system).subscribe(TraceMetrics, tracePathPattern, statsDMetricsListener, permanently = true)
- }
-
- // Subscribe to Dispatchers
- val includedDispatchers = statsDConfig.getStringList("includes.dispatcher").asScala
- for (dispatcherPathPattern ← includedDispatchers) {
- Kamon(Metrics)(system).subscribe(DispatcherMetrics, dispatcherPathPattern, statsDMetricsListener, permanently = true)
- }
-
- // Subscribe to SystemMetrics
- val includeSystemMetrics = statsDConfig.getBoolean("report-system-metrics")
- if (includeSystemMetrics) {
- Seq(CPUMetrics, ProcessCPUMetrics, MemoryMetrics, NetworkMetrics, GCMetrics, HeapMetrics, ContextSwitchesMetrics) foreach { metric ⇒
- Kamon(Metrics)(system).subscribe(metric, "*", statsDMetricsListener, permanently = true)
+ val subscriptions = statsDConfig.getConfig("subscriptions")
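+ // Subscribe the StatsD listener to every (category, pattern) pair configured under kamon.statsd.subscriptions.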
+ subscriptions.firstLevelKeys.map { subscriptionCategory ⇒
+ subscriptions.getStringList(subscriptionCategory).asScala.foreach { pattern ⇒
+ metricsExtension.subscribe(subscriptionCategory, pattern, statsDMetricsListener, permanently = true)
}
}
- def buildMetricsListener(tickInterval: Long, flushInterval: Long, keyGeneratorFQCN: String, config: Config): ActorRef = {
+ def buildMetricsListener(tickInterval: FiniteDuration, flushInterval: FiniteDuration, keyGeneratorFQCN: String, config: Config): ActorRef = {
assert(flushInterval >= tickInterval, "StatsD flush-interval must be equal to or greater than the tick-interval")
val keyGenerator = system.dynamicAccess.createInstanceFor[MetricKeyGenerator](keyGeneratorFQCN, (classOf[Config], config) :: Nil).get
@@ -100,7 +70,7 @@ class StatsDExtension(system: ExtendedActorSystem) extends Kamon.Extension {
// No need to buffer the metrics, let's go straight to the metrics sender.
metricsSender
} else {
- system.actorOf(TickMetricSnapshotBuffer.props(flushInterval.toInt.millis, metricsSender), "statsd-metrics-buffer")
+ system.actorOf(TickMetricSnapshotBuffer.props(flushInterval, metricsSender), "statsd-metrics-buffer")
}
}
}
\ No newline at end of file
diff --git a/kamon-statsd/src/main/scala/kamon/statsd/StatsDMetricsSender.scala b/kamon-statsd/src/main/scala/kamon/statsd/StatsDMetricsSender.scala
index 2aac3a52..3241e1f3 100644
--- a/kamon-statsd/src/main/scala/kamon/statsd/StatsDMetricsSender.scala
+++ b/kamon-statsd/src/main/scala/kamon/statsd/StatsDMetricsSender.scala
@@ -20,7 +20,7 @@ import akka.actor.{ ActorSystem, Props, ActorRef, Actor }
import akka.io.{ Udp, IO }
import java.net.InetSocketAddress
import akka.util.ByteString
-import kamon.metric.Subscriptions.TickMetricSnapshot
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
import java.text.{ DecimalFormatSymbols, DecimalFormat }
import java.util.Locale
@@ -51,11 +51,11 @@ class StatsDMetricsSender(remote: InetSocketAddress, maxPacketSizeInBytes: Long,
val packetBuilder = new MetricDataPacketBuilder(maxPacketSizeInBytes, udpSender, remote)
for (
- (groupIdentity, groupSnapshot) ← tick.metrics;
- (metricIdentity, metricSnapshot) ← groupSnapshot.metrics
+ (entity, snapshot) ← tick.metrics;
+ (metricKey, metricSnapshot) ← snapshot.metrics
) {
- val key = metricKeyGenerator.generateKey(groupIdentity, metricIdentity)
+ val key = metricKeyGenerator.generateKey(entity, metricKey)
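+ // Render the snapshot in StatsD wire format; the packet builder flushes whenever max-packet-size is reached.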
metricSnapshot match {
case hs: Histogram.Snapshot ⇒
diff --git a/kamon-statsd/src/test/scala/kamon/statsd/SimpleMetricKeyGeneratorSpec.scala b/kamon-statsd/src/test/scala/kamon/statsd/SimpleMetricKeyGeneratorSpec.scala
index ed3fae5b..0edeb3df 100644
--- a/kamon-statsd/src/test/scala/kamon/statsd/SimpleMetricKeyGeneratorSpec.scala
+++ b/kamon-statsd/src/test/scala/kamon/statsd/SimpleMetricKeyGeneratorSpec.scala
@@ -1,7 +1,8 @@
package kamon.statsd
import com.typesafe.config.ConfigFactory
-import kamon.metric.{ MetricGroupCategory, MetricGroupIdentity, MetricIdentity }
+import kamon.metric.instrument.UnitOfMeasurement
+import kamon.metric._
import org.scalatest.{ Matchers, WordSpec }
class SimpleMetricKeyGeneratorSpec extends WordSpec with Matchers {
@@ -68,13 +69,8 @@ class SimpleMetricKeyGeneratorSpec extends WordSpec with Matchers {
}
def buildMetricKey(categoryName: String, entityName: String, metricName: String)(implicit metricKeyGenerator: SimpleMetricKeyGenerator): String = {
- val metricIdentity = new MetricIdentity { val name: String = metricName }
- val groupIdentity = new MetricGroupIdentity {
- val name: String = entityName
- val category: MetricGroupCategory = new MetricGroupCategory {
- val name: String = categoryName
- }
- }
- metricKeyGenerator.generateKey(groupIdentity, metricIdentity)
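+ // Any concrete MetricKey works here; a HistogramKey is used just to exercise key generation.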
+ val metric = HistogramKey(metricName, UnitOfMeasurement.Unknown, Map.empty)
+ val entity = Entity(entityName, categoryName)
+ metricKeyGenerator.generateKey(entity, metric)
}
}
diff --git a/kamon-statsd/src/test/scala/kamon/statsd/StatsDMetricSenderSpec.scala b/kamon-statsd/src/test/scala/kamon/statsd/StatsDMetricSenderSpec.scala
index 5d37bb75..0211ac0f 100644
--- a/kamon-statsd/src/test/scala/kamon/statsd/StatsDMetricSenderSpec.scala
+++ b/kamon-statsd/src/test/scala/kamon/statsd/StatsDMetricSenderSpec.scala
@@ -19,95 +19,85 @@ package kamon.statsd
import akka.testkit.{ TestKitBase, TestProbe }
import akka.actor.{ ActorRef, Props, ActorSystem }
import kamon.Kamon
-import kamon.metric.instrument.Histogram.Precision
-import kamon.metric.instrument.Histogram
+import kamon.metric.instrument.{ InstrumentFactory, UnitOfMeasurement }
+import kamon.testkit.BaseKamonSpec
+import kamon.util.MilliTimestamp
import org.scalatest.{ Matchers, WordSpecLike }
import kamon.metric._
import akka.io.Udp
-import kamon.metric.Subscriptions.TickMetricSnapshot
-import java.lang.management.ManagementFactory
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
import java.net.InetSocketAddress
import com.typesafe.config.ConfigFactory
-class StatsDMetricSenderSpec extends TestKitBase with WordSpecLike with Matchers {
- implicit lazy val system: ActorSystem = ActorSystem("statsd-metric-sender-spec", ConfigFactory.parseString(
- """
- |kamon {
- | metrics {
- | disable-aspectj-weaver-missing-error = true
- | }
- |
- | statsd.simple-metric-key-generator {
- | application = kamon
- | hostname-override = kamon-host
- | include-hostname = true
- | metric-name-normalization-strategy = normalize
- | }
- |}
- |
- """.stripMargin))
+class StatsDMetricSenderSpec extends BaseKamonSpec("statsd-metric-sender-spec") {
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon {
+ | statsd.simple-metric-key-generator {
+ | application = kamon
+ | hostname-override = kamon-host
+ | include-hostname = true
+ | metric-name-normalization-strategy = normalize
+ | }
+ |}
+ |
+ """.stripMargin)
implicit val metricKeyGenerator = new SimpleMetricKeyGenerator(system.settings.config) {
override def hostName: String = "localhost_local"
}
- val collectionContext = Kamon(Metrics).buildDefaultCollectionContext
-
"the StatsDMetricSender" should {
-
"flush the metrics data after processing the tick, even if the max-packet-size is not reached" in new UdpListenerFixture {
- val testMetricName = "processing-time"
- val testMetricKey = buildMetricKey("actor", "/user/kamon", testMetricName)
- val testRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
- testRecorder.record(10L)
+ val testMetricKey = buildMetricKey(testEntity, "metric-one")
+ val testRecorder = buildRecorder("user/kamon")
+ testRecorder.metricOne.record(10L)
- val udp = setup(Map(testMetricName -> testRecorder.collect(collectionContext)))
+ val udp = setup(Map(testEntity -> testRecorder.collect(collectionContext)))
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
data.utf8String should be(s"$testMetricKey:10|ms")
}
"render several measurements of the same key under a single (key + multiple measurements) packet" in new UdpListenerFixture {
- val testMetricName = "processing-time"
- val testMetricKey = buildMetricKey("actor", "/user/kamon", testMetricName)
- val testRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
- testRecorder.record(10L)
- testRecorder.record(11L)
- testRecorder.record(12L)
-
- val udp = setup(Map(testMetricName -> testRecorder.collect(collectionContext)))
+ val testMetricKey = buildMetricKey(testEntity, "metric-one")
+ val testRecorder = buildRecorder("user/kamon")
+ testRecorder.metricOne.record(10L)
+ testRecorder.metricOne.record(11L)
+ testRecorder.metricOne.record(12L)
+
+ val udp = setup(Map(testEntity -> testRecorder.collect(collectionContext)))
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
data.utf8String should be(s"$testMetricKey:10|ms:11|ms:12|ms")
}
"include the correspondent sampling rate when rendering multiple occurrences of the same value" in new UdpListenerFixture {
- val testMetricName = "processing-time"
- val testMetricKey = buildMetricKey("actor", "/user/kamon", testMetricName)
- val testRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
- testRecorder.record(10L)
- testRecorder.record(10L)
+ val testMetricKey = buildMetricKey(testEntity, "metric-one")
+ val testRecorder = buildRecorder("user/kamon")
+ testRecorder.metricOne.record(10L)
+ testRecorder.metricOne.record(10L)
- val udp = setup(Map(testMetricName -> testRecorder.collect(collectionContext)))
+ val udp = setup(Map(testEntity -> testRecorder.collect(collectionContext)))
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
data.utf8String should be(s"$testMetricKey:10|ms|@0.5")
}
"flush the packet when the max-packet-size is reached" in new UdpListenerFixture {
- val testMetricName = "processing-time"
- val testMetricKey = buildMetricKey("actor", "/user/kamon", testMetricName)
- val testRecorder = Histogram(10000L, Precision.Normal, Scale.Unit)
+ val testMetricKey = buildMetricKey(testEntity, "metric-one")
+ val testRecorder = buildRecorder("user/kamon")
var bytes = testMetricKey.length
var level = 0
while (bytes <= testMaxPacketSize) {
level += 1
- testRecorder.record(level)
+ testRecorder.metricOne.record(level)
bytes += s":$level|ms".length
}
- val udp = setup(Map(testMetricName -> testRecorder.collect(collectionContext)))
+ val udp = setup(Map(testEntity -> testRecorder.collect(collectionContext)))
udp.expectMsgType[Udp.Send] // let the first flush pass
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
@@ -115,51 +105,38 @@ class StatsDMetricSenderSpec extends TestKitBase with WordSpecLike with Matchers
}
"render multiple keys in the same packet using newline as separator" in new UdpListenerFixture {
- val firstTestMetricName = "first-test-metric"
- val firstTestMetricKey = buildMetricKey("actor", "/user/kamon", firstTestMetricName)
- val secondTestMetricName = "second-test-metric"
- val secondTestMetricKey = buildMetricKey("actor", "/user/kamon", secondTestMetricName)
+ val testMetricKey1 = buildMetricKey(testEntity, "metric-one")
+ val testMetricKey2 = buildMetricKey(testEntity, "metric-two")
+ val testRecorder = buildRecorder("user/kamon")
- val firstTestRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
- val secondTestRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
+ testRecorder.metricOne.record(10L)
+ testRecorder.metricOne.record(10L)
+ testRecorder.metricOne.record(11L)
- firstTestRecorder.record(10L)
- firstTestRecorder.record(10L)
- firstTestRecorder.record(11L)
+ testRecorder.metricTwo.record(20L)
+ testRecorder.metricTwo.record(21L)
- secondTestRecorder.record(20L)
- secondTestRecorder.record(21L)
-
- val udp = setup(Map(
- firstTestMetricName -> firstTestRecorder.collect(collectionContext),
- secondTestMetricName -> secondTestRecorder.collect(collectionContext)))
+ val udp = setup(Map(testEntity -> testRecorder.collect(collectionContext)))
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
- data.utf8String should be(s"$firstTestMetricKey:10|ms|@0.5:11|ms\n$secondTestMetricKey:20|ms:21|ms")
+ data.utf8String should be(s"$testMetricKey1:10|ms|@0.5:11|ms\n$testMetricKey2:20|ms:21|ms")
}
}
trait UdpListenerFixture {
val testMaxPacketSize = system.settings.config.getBytes("kamon.statsd.max-packet-size")
- val testGroupIdentity = new MetricGroupIdentity {
- val name: String = "/user/kamon"
- val category: MetricGroupCategory = new MetricGroupCategory {
- val name: String = "actor"
- }
+ val testEntity = Entity("user/kamon", "test")
+
+ def buildMetricKey(entity: Entity, metricName: String)(implicit metricKeyGenerator: SimpleMetricKeyGenerator): String = {
+ val metricKey = HistogramKey(metricName, UnitOfMeasurement.Unknown, Map.empty)
+ metricKeyGenerator.generateKey(entity, metricKey)
}
- def buildMetricKey(categoryName: String, entityName: String, metricName: String)(implicit metricKeyGenerator: SimpleMetricKeyGenerator): String = {
- val metricIdentity = new MetricIdentity { val name: String = metricName }
- val groupIdentity = new MetricGroupIdentity {
- val name: String = entityName
- val category: MetricGroupCategory = new MetricGroupCategory {
- val name: String = categoryName
- }
- }
- metricKeyGenerator.generateKey(groupIdentity, metricIdentity)
+ def buildRecorder(name: String): TestEntityRecorder = {
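+ // Registers (or looks up) a TestEntityRecorder for the given entity name under the "test" category.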
+ Kamon.metrics.register(TestEntityRecorder, name).get.recorder
}
- def setup(metrics: Map[String, MetricSnapshot]): TestProbe = {
+ def setup(metrics: Map[Entity, EntitySnapshot]): TestProbe = {
val udp = TestProbe()
val metricsSender = system.actorOf(Props(new StatsDMetricsSender(new InetSocketAddress("127.0.0.1", 0), testMaxPacketSize, metricKeyGenerator) {
override def udpExtension(implicit system: ActorSystem): ActorRef = udp.ref
@@ -169,22 +146,19 @@ class StatsDMetricSenderSpec extends TestKitBase with WordSpecLike with Matchers
udp.expectMsgType[Udp.SimpleSender]
udp.reply(Udp.SimpleSenderReady)
- val testMetrics = for ((metricName, snapshot) ← metrics) yield {
- val testMetricIdentity = new MetricIdentity {
- val name: String = metricName
- }
-
- (testMetricIdentity, snapshot)
- }
-
- metricsSender ! TickMetricSnapshot(0, 0, Map(testGroupIdentity -> new MetricGroupSnapshot {
- type GroupSnapshotType = Histogram.Snapshot
- def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType = ???
-
- val metrics: Map[MetricIdentity, MetricSnapshot] = testMetrics.toMap
- }))
-
+ val fakeSnapshot = TickMetricSnapshot(MilliTimestamp.now, MilliTimestamp.now, metrics)
+ metricsSender ! fakeSnapshot
udp
}
}
}
+
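+// Minimal recorder used by these specs: two plain histograms registered under the "test" category.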
+class TestEntityRecorder(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val metricOne = histogram("metric-one")
+ val metricTwo = histogram("metric-two")
+}
+
+object TestEntityRecorder extends EntityRecorderFactory[TestEntityRecorder] {
+ def category: String = "test"
+ def createRecorder(instrumentFactory: InstrumentFactory): TestEntityRecorder = new TestEntityRecorder(instrumentFactory)
+}
diff --git a/kamon-system-metrics/src/main/resources/kamon/system/sigar/index b/kamon-system-metrics/src/main/resources/kamon/system/sigar/index
deleted file mode 100644
index cad1f326..00000000
--- a/kamon-system-metrics/src/main/resources/kamon/system/sigar/index
+++ /dev/null
@@ -1,21 +0,0 @@
-libsigar-amd64-freebsd-6.so
-libsigar-amd64-linux.so
-libsigar-amd64-solaris.so
-libsigar-ia64-hpux-11.sl
-libsigar-ia64-linux.so
-libsigar-pa-hpux-11.sl
-libsigar-ppc64-aix-5.so
-libsigar-ppc64-linux.so
-libsigar-ppc-aix-5.so
-libsigar-ppc-linux.so
-libsigar-s390x-linux.so
-libsigar-sparc64-solaris.so
-libsigar-sparc-solaris.so
-libsigar-universal64-macosx.dylib
-libsigar-universal-macosx.dylib
-libsigar-x86-freebsd-5.so
-libsigar-x86-freebsd-6.so
-libsigar-x86-linux.so
-libsigar-x86-solaris.so
-sigar-amd64-winnt.dll
-sigar-x86-winnt.dll
\ No newline at end of file
diff --git a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-amd64-freebsd-6.so b/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-amd64-freebsd-6.so
deleted file mode 100644
index 3e94f0d2..00000000
--- a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-amd64-freebsd-6.so
+++ /dev/null
Binary files differ
diff --git a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-amd64-linux.so b/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-amd64-linux.so
deleted file mode 100644
index 5a2e4c24..00000000
--- a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-amd64-linux.so
+++ /dev/null
Binary files differ
diff --git a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-amd64-solaris.so b/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-amd64-solaris.so
deleted file mode 100644
index 6396482a..00000000
--- a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-amd64-solaris.so
+++ /dev/null
Binary files differ
diff --git a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ia64-hpux-11.sl b/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ia64-hpux-11.sl
deleted file mode 100644
index d92ea4a9..00000000
--- a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ia64-hpux-11.sl
+++ /dev/null
Binary files differ
diff --git a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ia64-linux.so b/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ia64-linux.so
deleted file mode 100644
index 2bd2fc8e..00000000
--- a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ia64-linux.so
+++ /dev/null
Binary files differ
diff --git a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-pa-hpux-11.sl b/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-pa-hpux-11.sl
deleted file mode 100644
index 0dfd8a11..00000000
--- a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-pa-hpux-11.sl
+++ /dev/null
Binary files differ
diff --git a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ppc-aix-5.so b/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ppc-aix-5.so
deleted file mode 100644
index 7d4b5199..00000000
--- a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ppc-aix-5.so
+++ /dev/null
Binary files differ
diff --git a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ppc-linux.so b/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ppc-linux.so
deleted file mode 100644
index 4394b1b0..00000000
--- a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ppc-linux.so
+++ /dev/null
Binary files differ
diff --git a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ppc64-aix-5.so b/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ppc64-aix-5.so
deleted file mode 100644
index 35fd8288..00000000
--- a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ppc64-aix-5.so
+++ /dev/null
Binary files differ
diff --git a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ppc64-linux.so b/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ppc64-linux.so
deleted file mode 100644
index a1ba2529..00000000
--- a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-ppc64-linux.so
+++ /dev/null
Binary files differ
diff --git a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-s390x-linux.so b/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-s390x-linux.so
deleted file mode 100644
index c275f4ac..00000000
--- a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-s390x-linux.so
+++ /dev/null
Binary files differ
diff --git a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-sparc-solaris.so b/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-sparc-solaris.so
deleted file mode 100644
index aa847d2b..00000000
--- a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-sparc-solaris.so
+++ /dev/null
Binary files differ
diff --git a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-sparc64-solaris.so b/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-sparc64-solaris.so
deleted file mode 100644
index 6c4fe809..00000000
--- a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-sparc64-solaris.so
+++ /dev/null
Binary files differ
diff --git a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-universal-macosx.dylib b/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-universal-macosx.dylib
deleted file mode 100644
index 27ab1071..00000000
--- a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-universal-macosx.dylib
+++ /dev/null
Binary files differ
diff --git a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-universal64-macosx.dylib b/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-universal64-macosx.dylib
deleted file mode 100644
index 0c721fec..00000000
--- a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-universal64-macosx.dylib
+++ /dev/null
Binary files differ
diff --git a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-x86-freebsd-5.so b/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-x86-freebsd-5.so
deleted file mode 100644
index 8c50c611..00000000
--- a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-x86-freebsd-5.so
+++ /dev/null
Binary files differ
diff --git a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-x86-freebsd-6.so b/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-x86-freebsd-6.so
deleted file mode 100644
index f0800274..00000000
--- a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-x86-freebsd-6.so
+++ /dev/null
Binary files differ
diff --git a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-x86-linux.so b/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-x86-linux.so
deleted file mode 100644
index a0b64edd..00000000
--- a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-x86-linux.so
+++ /dev/null
Binary files differ
diff --git a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-x86-solaris.so b/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-x86-solaris.so
deleted file mode 100644
index c6452e56..00000000
--- a/kamon-system-metrics/src/main/resources/kamon/system/sigar/libsigar-x86-solaris.so
+++ /dev/null
Binary files differ
diff --git a/kamon-system-metrics/src/main/resources/kamon/system/sigar/sigar-amd64-winnt.dll b/kamon-system-metrics/src/main/resources/kamon/system/sigar/sigar-amd64-winnt.dll
deleted file mode 100644
index 1ec8a035..00000000
--- a/kamon-system-metrics/src/main/resources/kamon/system/sigar/sigar-amd64-winnt.dll
+++ /dev/null
Binary files differ
diff --git a/kamon-system-metrics/src/main/resources/kamon/system/sigar/sigar-x86-winnt.dll b/kamon-system-metrics/src/main/resources/kamon/system/sigar/sigar-x86-winnt.dll
deleted file mode 100644
index 6afdc016..00000000
--- a/kamon-system-metrics/src/main/resources/kamon/system/sigar/sigar-x86-winnt.dll
+++ /dev/null
Binary files differ
diff --git a/kamon-system-metrics/src/main/resources/reference.conf b/kamon-system-metrics/src/main/resources/reference.conf
index fbdb3b89..57b34195 100644
--- a/kamon-system-metrics/src/main/resources/reference.conf
+++ b/kamon-system-metrics/src/main/resources/reference.conf
@@ -3,74 +3,187 @@
# ============================================ #
kamon {
- metrics {
- precision {
- system {
- process-cpu {
- cpu-percentage = {
- highest-trackable-value = 999999999
- significant-value-digits = 2
- }
- total-process-time = {
- highest-trackable-value = 999999999
- significant-value-digits = 2
- }
- }
-
- cpu {
- user = {
- highest-trackable-value = 999
- significant-value-digits = 2
- }
- system = {
- highest-trackable-value = 999
- significant-value-digits = 2
- }
- wait = {
- highest-trackable-value = 999
- significant-value-digits = 2
- }
- idle ={
- highest-trackable-value = 999
- significant-value-digits = 2
- }
- }
-
- network {
- rx-bytes = ${kamon.metrics.precision.default-histogram-precision}
- tx-bytes = ${kamon.metrics.precision.default-histogram-precision}
- rx-errors = ${kamon.metrics.precision.default-histogram-precision}
- tx-errors = ${kamon.metrics.precision.default-histogram-precision}
- }
-
- memory {
- used = ${kamon.metrics.precision.default-histogram-precision}
- free = ${kamon.metrics.precision.default-histogram-precision}
- buffer = ${kamon.metrics.precision.default-histogram-precision}
- cache = ${kamon.metrics.precision.default-histogram-precision}
- swap-used = ${kamon.metrics.precision.default-histogram-precision}
- swap-free = ${kamon.metrics.precision.default-histogram-precision}
- }
-
- context-switches {
- per-process-voluntary = ${kamon.metrics.precision.default-histogram-precision}
- per-process-non-voluntary = ${kamon.metrics.precision.default-histogram-precision}
- global = ${kamon.metrics.precision.default-histogram-precision}
- }
+ system-metrics {
+
+ # Location where the Sigar provisioner extracts its native libraries. Use a location scoped per application
+ # instance, such as the program's working directory.
+ sigar-native-folder = ${user.dir}"/native"
+
+ # Frequency with which all Sigar-based metrics will be updated. Setting this value to less than 1 second
+ # might cause some Sigar metrics to behave incorrectly.
+ sigar-metrics-refresh-interval = 1 second
+
+ # Frequency with which context-switches metrics will be updated.
+ context-switches-refresh-interval = 1 second
+
+ # Dispatcher to be used by the SigarMetricsUpdater actor.
+ sigar-dispatcher {
+ executor = "thread-pool-executor"
+ type = PinnedDispatcher
+ }
+
+ # Dispatcher to be used by the ContextSwitchesUpdater actor.
+ context-switches-dispatcher {
+ executor = "thread-pool-executor"
+ type = PinnedDispatcher
+ }
+ }
+
+ metrics.instrument-settings {
+ system-metric {
+
+ #
+ # CPU
+ #
+ cpu-user {
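+ # CPU usage values are percentages, so 100 is the natural upper bound.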
+ highest-trackable-value = 100
+ }
+
+ cpu-system = ${kamon.metrics.instrument-settings.system-metric.cpu-user}
+ cpu-wait = ${kamon.metrics.instrument-settings.system-metric.cpu-user}
+ cpu-idle = ${kamon.metrics.instrument-settings.system-metric.cpu-user}
+ cpu-stolen = ${kamon.metrics.instrument-settings.system-metric.cpu-user}
+
+
+ #
+ # Process CPU
+ #
+ process-user-cpu = ${kamon.metrics.instrument-settings.system-metric.cpu-user}
+ process-system-cpu = ${kamon.metrics.instrument-settings.system-metric.cpu-user}
+ process-cpu = ${kamon.metrics.instrument-settings.system-metric.cpu-user}
+
+
+ #
+ # Garbage Collection
+ #
+ garbage-collection-count {
+ highest-trackable-value = 1000000
+ refresh-interval = 1 second
+ }
+
+ garbage-collection-time {
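+ # 3600000 ms = one hour of accumulated GC time.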
+ highest-trackable-value = 3600000
+ refresh-interval = 1 second
+ }
+
+
+ #
+ # Heap Memory
+ #
+ heap-used {
+ # 5 GB, which should cover typical non-Zing JVM heaps
+ highest-trackable-value = 5368709120
+ refresh-interval = 1 second
+ }
+
+ heap-max = ${kamon.metrics.instrument-settings.system-metric.heap-used}
+ heap-committed = ${kamon.metrics.instrument-settings.system-metric.heap-used}
+
+
+ #
+ # Non-Heap Memory
+ #
+ non-heap-used {
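+ # 5 GB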
+ highest-trackable-value = 5368709120
+ refresh-interval = 1 second
+ }
+ non-heap-max = ${kamon.metrics.instrument-settings.system-metric.non-heap-used}
+ non-heap-committed = ${kamon.metrics.instrument-settings.system-metric.non-heap-used}
+
+
+ #
+ # JVM Threads
+ #
+ thread-count {
+ highest-trackable-value = 10000
+ refresh-interval = 1 second
+ }
+
+ daemon-thread-count = ${kamon.metrics.instrument-settings.system-metric.thread-count}
+ peak-thread-count = ${kamon.metrics.instrument-settings.system-metric.thread-count}
+
+
+ #
+ # Class Loading
+ #
+ classes-loaded {
+ highest-trackable-value = 10000000
+ refresh-interval = 1 second
}
- jvm {
- heap {
- used = ${kamon.metrics.precision.default-gauge-precision}
- max = ${kamon.metrics.precision.default-gauge-precision}
- committed = ${kamon.metrics.precision.default-gauge-precision}
- }
-
- gc {
- count = ${kamon.metrics.precision.default-gauge-precision}
- time = ${kamon.metrics.precision.default-gauge-precision}
- }
+ classes-unloaded = ${kamon.metrics.instrument-settings.system-metric.classes-loaded}
+ classes-currently-loaded = ${kamon.metrics.instrument-settings.system-metric.classes-loaded}
+
+
+ #
+ # File System
+ #
+ file-system-reads {
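+ # 100 GB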
+ highest-trackable-value = 107374182400
}
+
+ file-system-writes = ${kamon.metrics.instrument-settings.system-metric.file-system-reads}
+
+
+ #
+ # Load Average
+ #
+ one-minute {
+ highest-trackable-value = 10000
+ }
+
+ five-minutes = ${kamon.metrics.instrument-settings.system-metric.one-minute}
+ fifteen-minutes = ${kamon.metrics.instrument-settings.system-metric.one-minute}
+
+
+ #
+ # System Memory
+ #
+ memory-used {
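+ # 5 GB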
+ highest-trackable-value = 5368709120
+ }
+
+ memory-free = ${kamon.metrics.instrument-settings.system-metric.memory-used}
+ swap-free = ${kamon.metrics.instrument-settings.system-metric.memory-used}
+ swap-used = ${kamon.metrics.instrument-settings.system-metric.memory-used}
+
+
+ #
+ # Network
+ #
+ tx-bytes {
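+ # 100 GB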
+ highest-trackable-value = 107374182400
+ }
+
+ rx-bytes = ${kamon.metrics.instrument-settings.system-metric.tx-bytes}
+
+ tx-errors {
+ highest-trackable-value = 10000000
+ }
+
+ rx-errors = ${kamon.metrics.instrument-settings.system-metric.tx-errors}
+ tx-dropped = ${kamon.metrics.instrument-settings.system-metric.tx-errors}
+ rx-dropped = ${kamon.metrics.instrument-settings.system-metric.tx-errors}
+
+
+ #
+ # Context Switches
+ #
+ context-switches-process-voluntary {
+ highest-trackable-value = 10000000
+ }
+
+ context-switches-process-non-voluntary = ${kamon.metrics.instrument-settings.system-metric.context-switches-process-voluntary}
+ context-switches-global = ${kamon.metrics.instrument-settings.system-metric.context-switches-process-voluntary}
+
+ }
+ }
+
+ modules {
+ kamon-system-metrics {
+ auto-start = yes
+ requires-aspectj = no
+ extension-id = "kamon.system.SystemMetrics"
}
}
}
\ No newline at end of file
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/CPUMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/CPUMetrics.scala
deleted file mode 100644
index ef7f225c..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/metrics/CPUMetrics.scala
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.metrics
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric._
-import kamon.metric.instrument.Histogram
-
-case class CPUMetrics(name: String) extends MetricGroupIdentity {
- val category = CPUMetrics
-}
-
-object CPUMetrics extends MetricGroupCategory {
- val name = "cpu"
-
- case object User extends MetricIdentity { val name = "user" }
- case object System extends MetricIdentity { val name = "system" }
- case object Wait extends MetricIdentity { val name = "wait" }
- case object Idle extends MetricIdentity { val name = "idle" }
-
- case class CPUMetricRecorder(user: Histogram, system: Histogram, cpuWait: Histogram, idle: Histogram)
- extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): MetricGroupSnapshot = {
- CPUMetricSnapshot(user.collect(context), system.collect(context), cpuWait.collect(context), idle.collect(context))
- }
-
- def cleanup: Unit = {}
- }
-
- case class CPUMetricSnapshot(user: Histogram.Snapshot, system: Histogram.Snapshot, cpuWait: Histogram.Snapshot, idle: Histogram.Snapshot)
- extends MetricGroupSnapshot {
-
- type GroupSnapshotType = CPUMetricSnapshot
-
- def merge(that: CPUMetricSnapshot, context: CollectionContext): GroupSnapshotType = {
- CPUMetricSnapshot(user.merge(that.user, context), system.merge(that.system, context), cpuWait.merge(that.cpuWait, context), idle.merge(that.idle, context))
- }
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- User -> user,
- System -> system,
- Wait -> cpuWait,
- Idle -> idle)
- }
-
- val Factory = CPUMetricGroupFactory
-}
-
-case object CPUMetricGroupFactory extends MetricGroupFactory {
-
- import CPUMetrics._
-
- type GroupRecorder = CPUMetricRecorder
-
- def create(config: Config, system: ActorSystem): GroupRecorder = {
- val settings = config.getConfig("precision.system.cpu")
-
- val userConfig = settings.getConfig("user")
- val systemConfig = settings.getConfig("system")
- val cpuWaitConfig = settings.getConfig("wait")
- val idleConfig = settings.getConfig("idle")
-
- new CPUMetricRecorder(
- Histogram.fromConfig(userConfig),
- Histogram.fromConfig(systemConfig),
- Histogram.fromConfig(cpuWaitConfig),
- Histogram.fromConfig(idleConfig))
- }
-}
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/ContextSwitchesMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/ContextSwitchesMetrics.scala
deleted file mode 100644
index 86aeabce..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/metrics/ContextSwitchesMetrics.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metrics
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric._
-import kamon.metric.instrument.Histogram
-
-case class ContextSwitchesMetrics(name: String) extends MetricGroupIdentity {
- val category = ContextSwitchesMetrics
-}
-
-object ContextSwitchesMetrics extends MetricGroupCategory {
- val name = "context-switches"
-
- case object PerProcessVoluntary extends MetricIdentity { val name = "per-process-voluntary" }
- case object PerProcessNonVoluntary extends MetricIdentity { val name = "per-process-non-voluntary" }
- case object Global extends MetricIdentity { val name = "global" }
-
- case class ContextSwitchesMetricsRecorder(perProcessVoluntary: Histogram, perProcessNonVoluntary: Histogram, global: Histogram)
- extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): MetricGroupSnapshot = {
- ContextSwitchesMetricsSnapshot(perProcessVoluntary.collect(context), perProcessNonVoluntary.collect(context), global.collect(context))
- }
-
- def cleanup: Unit = {}
- }
-
- case class ContextSwitchesMetricsSnapshot(perProcessVoluntary: Histogram.Snapshot, perProcessNonVoluntary: Histogram.Snapshot, global: Histogram.Snapshot)
- extends MetricGroupSnapshot {
-
- type GroupSnapshotType = ContextSwitchesMetricsSnapshot
-
- def merge(that: ContextSwitchesMetricsSnapshot, context: CollectionContext): GroupSnapshotType = {
- ContextSwitchesMetricsSnapshot(perProcessVoluntary.merge(that.perProcessVoluntary, context), perProcessVoluntary.merge(that.perProcessVoluntary, context), global.merge(that.global, context))
- }
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- PerProcessVoluntary -> perProcessVoluntary,
- PerProcessNonVoluntary -> perProcessNonVoluntary,
- Global -> global)
- }
-
- val Factory = ContextSwitchesMetricGroupFactory
-}
-
-case object ContextSwitchesMetricGroupFactory extends MetricGroupFactory {
- import ContextSwitchesMetrics._
-
- type GroupRecorder = ContextSwitchesMetricsRecorder
-
- def create(config: Config, system: ActorSystem): GroupRecorder = {
- val settings = config.getConfig("precision.system.context-switches")
-
- val perProcessVoluntary = settings.getConfig("per-process-voluntary")
- val perProcessNonVoluntary = settings.getConfig("per-process-non-voluntary")
- val global = settings.getConfig("global")
-
- new ContextSwitchesMetricsRecorder(
- Histogram.fromConfig(perProcessVoluntary),
- Histogram.fromConfig(perProcessNonVoluntary),
- Histogram.fromConfig(global))
- }
-}
-
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/GCMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/GCMetrics.scala
deleted file mode 100644
index bc5fc724..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/metrics/GCMetrics.scala
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.metrics
-
-import java.lang.management.GarbageCollectorMXBean
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric._
-import kamon.metric.instrument.{ Gauge, Histogram }
-
-case class GCMetrics(name: String) extends MetricGroupIdentity {
- val category = GCMetrics
-}
-
-object GCMetrics extends MetricGroupCategory {
- val name = "gc"
-
- case object CollectionCount extends MetricIdentity { val name = "collection-count" }
- case object CollectionTime extends MetricIdentity { val name = "collection-time" }
-
- case class GCMetricRecorder(count: Gauge, time: Gauge)
- extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): MetricGroupSnapshot = {
- GCMetricSnapshot(count.collect(context), time.collect(context))
- }
-
- def cleanup: Unit = {}
- }
-
- case class GCMetricSnapshot(count: Histogram.Snapshot, time: Histogram.Snapshot)
- extends MetricGroupSnapshot {
-
- type GroupSnapshotType = GCMetricSnapshot
-
- def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType = {
- GCMetricSnapshot(count.merge(that.count, context), time.merge(that.time, context))
- }
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- CollectionCount -> count,
- CollectionTime -> time)
- }
-
- def Factory(gc: GarbageCollectorMXBean) = GCMetricGroupFactory(gc)
-}
-
-case class GCMetricGroupFactory(gc: GarbageCollectorMXBean) extends MetricGroupFactory {
- import GCMetrics._
-
- type GroupRecorder = GCMetricRecorder
-
- def create(config: Config, system: ActorSystem): GroupRecorder = {
- val settings = config.getConfig("precision.jvm.gc")
-
- val countConfig = settings.getConfig("count")
- val timeConfig = settings.getConfig("time")
-
- new GCMetricRecorder(
- Gauge.fromConfig(countConfig, system)(() ⇒ gc.getCollectionCount),
- Gauge.fromConfig(timeConfig, system, Scale.Milli)(() ⇒ gc.getCollectionTime))
- }
-}
-
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/HeapMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/HeapMetrics.scala
deleted file mode 100644
index ac033fe2..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/metrics/HeapMetrics.scala
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.metrics
-
-import java.lang.management.ManagementFactory
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric._
-import kamon.metric.instrument.{ Gauge, Histogram }
-
-case class HeapMetrics(name: String) extends MetricGroupIdentity {
- val category = HeapMetrics
-}
-
-object HeapMetrics extends MetricGroupCategory {
- val name = "heap"
-
- case object Used extends MetricIdentity { val name = "used-heap" }
- case object Max extends MetricIdentity { val name = "max-heap" }
- case object Committed extends MetricIdentity { val name = "committed-heap" }
-
- case class HeapMetricRecorder(used: Gauge, max: Gauge, committed: Gauge)
- extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): MetricGroupSnapshot = {
- HeapMetricSnapshot(used.collect(context), max.collect(context), committed.collect(context))
- }
-
- def cleanup: Unit = {}
- }
-
- case class HeapMetricSnapshot(used: Histogram.Snapshot, max: Histogram.Snapshot, committed: Histogram.Snapshot)
- extends MetricGroupSnapshot {
-
- type GroupSnapshotType = HeapMetricSnapshot
-
- def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType = {
- HeapMetricSnapshot(used.merge(that.used, context), max.merge(that.max, context), committed.merge(that.committed, context))
- }
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- Used -> used,
- Max -> max,
- Committed -> committed)
- }
-
- val Factory = HeapMetricGroupFactory
-}
-
-case object HeapMetricGroupFactory extends MetricGroupFactory {
-
- import HeapMetrics._
- import kamon.system.SystemMetricsExtension._
-
- def heap = ManagementFactory.getMemoryMXBean.getHeapMemoryUsage
-
- type GroupRecorder = HeapMetricRecorder
-
- def create(config: Config, system: ActorSystem): GroupRecorder = {
- val settings = config.getConfig("precision.jvm.heap")
-
- val usedHeapConfig = settings.getConfig("used")
- val maxHeapConfig = settings.getConfig("max")
- val committedHeapConfig = settings.getConfig("committed")
-
- new HeapMetricRecorder(
- Gauge.fromConfig(usedHeapConfig, system, Scale.Mega)(() ⇒ toMB(heap.getUsed)),
- Gauge.fromConfig(maxHeapConfig, system, Scale.Mega)(() ⇒ toMB(heap.getMax)),
- Gauge.fromConfig(committedHeapConfig, system, Scale.Mega)(() ⇒ toMB(heap.getCommitted)))
- }
-
-}
-
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/MemoryMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/MemoryMetrics.scala
deleted file mode 100644
index 14051427..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/metrics/MemoryMetrics.scala
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.metrics
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric._
-import kamon.metric.instrument.Histogram
-
-case class MemoryMetrics(name: String) extends MetricGroupIdentity {
- val category = MemoryMetrics
-}
-
-object MemoryMetrics extends MetricGroupCategory {
- val name = "memory"
-
- case object Used extends MetricIdentity { val name = "used" }
- case object Free extends MetricIdentity { val name = "free" }
- case object Buffer extends MetricIdentity { val name = "buffer" }
- case object Cache extends MetricIdentity { val name = "cache" }
- case object SwapUsed extends MetricIdentity { val name = "swap-used" }
- case object SwapFree extends MetricIdentity { val name = "swap-free" }
-
- case class MemoryMetricRecorder(used: Histogram, free: Histogram, buffer: Histogram, cache: Histogram, swapUsed: Histogram, swapFree: Histogram)
- extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): MetricGroupSnapshot = {
- MemoryMetricSnapshot(used.collect(context), free.collect(context), buffer.collect(context), cache.collect(context), swapUsed.collect(context), swapFree.collect(context))
- }
-
- def cleanup: Unit = {}
- }
-
- case class MemoryMetricSnapshot(used: Histogram.Snapshot, free: Histogram.Snapshot, buffer: Histogram.Snapshot, cache: Histogram.Snapshot, swapUsed: Histogram.Snapshot, swapFree: Histogram.Snapshot)
- extends MetricGroupSnapshot {
-
- type GroupSnapshotType = MemoryMetricSnapshot
-
- def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType = {
- MemoryMetricSnapshot(used.merge(that.used, context), free.merge(that.free, context), buffer.merge(that.buffer, context), cache.merge(that.cache, context), swapUsed.merge(that.swapUsed, context), swapFree.merge(that.swapFree, context))
- }
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- Used -> used,
- Free -> free,
- Buffer -> buffer,
- Cache -> cache,
- SwapUsed -> swapUsed,
- SwapFree -> swapFree)
- }
-
- val Factory = MemoryMetricGroupFactory
-}
-
-case object MemoryMetricGroupFactory extends MetricGroupFactory {
-
- import MemoryMetrics._
-
- type GroupRecorder = MemoryMetricRecorder
-
- def create(config: Config, system: ActorSystem): GroupRecorder = {
- val settings = config.getConfig("precision.system.memory")
-
- val usedConfig = settings.getConfig("used")
- val freeConfig = settings.getConfig("free")
- val bufferConfig = settings.getConfig("buffer")
- val cacheConfig = settings.getConfig("cache")
- val swapUsedConfig = settings.getConfig("swap-used")
- val swapFreeConfig = settings.getConfig("swap-free")
-
-    new MemoryMetricRecorder(
-      Histogram.fromConfig(usedConfig, Scale.Mega),
-      Histogram.fromConfig(freeConfig, Scale.Mega),
-      Histogram.fromConfig(bufferConfig, Scale.Mega),
-      Histogram.fromConfig(cacheConfig, Scale.Mega),
-      Histogram.fromConfig(swapUsedConfig, Scale.Mega),
-      Histogram.fromConfig(swapFreeConfig, Scale.Mega))
- }
-} \ No newline at end of file
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/NetworkMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/NetworkMetrics.scala
deleted file mode 100644
index f348bb0c..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/metrics/NetworkMetrics.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.metrics
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric._
-import kamon.metric.instrument.Histogram
-
-case class NetworkMetrics(name: String) extends MetricGroupIdentity {
- val category = NetworkMetrics
-}
-
-object NetworkMetrics extends MetricGroupCategory {
- val name = "network"
-
- case object RxBytes extends MetricIdentity { val name = "rx-bytes" }
- case object TxBytes extends MetricIdentity { val name = "tx-bytes" }
- case object RxErrors extends MetricIdentity { val name = "rx-errors" }
- case object TxErrors extends MetricIdentity { val name = "tx-errors" }
-
- case class NetworkMetricRecorder(rxBytes: Histogram, txBytes: Histogram, rxErrors: Histogram, txErrors: Histogram)
- extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): MetricGroupSnapshot = {
- NetworkMetricSnapshot(rxBytes.collect(context), txBytes.collect(context), rxErrors.collect(context), txErrors.collect(context))
- }
-
- def cleanup: Unit = {}
- }
-
- case class NetworkMetricSnapshot(rxBytes: Histogram.Snapshot, txBytes: Histogram.Snapshot, rxErrors: Histogram.Snapshot, txErrors: Histogram.Snapshot)
- extends MetricGroupSnapshot {
-
- type GroupSnapshotType = NetworkMetricSnapshot
-
- def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType = {
- NetworkMetricSnapshot(rxBytes.merge(that.rxBytes, context), txBytes.merge(that.txBytes, context), rxErrors.merge(that.rxErrors, context), txErrors.merge(that.txErrors, context))
- }
-
- val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- RxBytes -> rxBytes,
- TxBytes -> txBytes,
- RxErrors -> rxErrors,
- TxErrors -> txErrors)
- }
-
- val Factory = NetworkMetricGroupFactory
-}
-
-case object NetworkMetricGroupFactory extends MetricGroupFactory {
- import NetworkMetrics._
-
- type GroupRecorder = NetworkMetricRecorder
-
- def create(config: Config, system: ActorSystem): GroupRecorder = {
- val settings = config.getConfig("precision.system.network")
-
- val rxBytesConfig = settings.getConfig("rx-bytes")
- val txBytesConfig = settings.getConfig("tx-bytes")
- val rxErrorsConfig = settings.getConfig("rx-errors")
- val txErrorsConfig = settings.getConfig("tx-errors")
-
- new NetworkMetricRecorder(
- Histogram.fromConfig(rxBytesConfig, Scale.Kilo),
- Histogram.fromConfig(txBytesConfig, Scale.Kilo),
- Histogram.fromConfig(rxErrorsConfig),
- Histogram.fromConfig(txErrorsConfig))
- }
-} \ No newline at end of file
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/ProcessCPUMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/ProcessCPUMetrics.scala
deleted file mode 100644
index ebd79d48..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/metrics/ProcessCPUMetrics.scala
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.metrics
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric._
-import kamon.metric.instrument.Histogram
-
-case class ProcessCPUMetrics(name: String) extends MetricGroupIdentity {
- val category = ProcessCPUMetrics
-}
-
-object ProcessCPUMetrics extends MetricGroupCategory {
- val name = "proc-cpu"
-
- case object CpuPercent extends MetricIdentity { val name = "cpu-percentage" }
- case object TotalProcessTime extends MetricIdentity { val name = "total-process-time" }
-
- case class ProcessCPUMetricsRecorder(cpuPercent: Histogram, totalProcessTime: Histogram)
- extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): MetricGroupSnapshot = {
- ProcessCPUMetricsSnapshot(cpuPercent.collect(context), totalProcessTime.collect(context))
- }
-
- def cleanup: Unit = {}
- }
-
- case class ProcessCPUMetricsSnapshot(cpuPercent: Histogram.Snapshot, totalProcessTime: Histogram.Snapshot)
- extends MetricGroupSnapshot {
-
- type GroupSnapshotType = ProcessCPUMetricsSnapshot
-
- def merge(that: ProcessCPUMetricsSnapshot, context: CollectionContext): GroupSnapshotType = {
- ProcessCPUMetricsSnapshot(cpuPercent.merge(that.cpuPercent, context), totalProcessTime.merge(that.totalProcessTime, context))
- }
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- CpuPercent -> cpuPercent,
- TotalProcessTime -> totalProcessTime)
- }
-
- val Factory = ProcessCPUMetricGroupFactory
-}
-
-case object ProcessCPUMetricGroupFactory extends MetricGroupFactory {
- import ProcessCPUMetrics._
-
- type GroupRecorder = ProcessCPUMetricsRecorder
-
- def create(config: Config, system: ActorSystem): GroupRecorder = {
- val settings = config.getConfig("precision.system.process-cpu")
-
- val cpuPercentageConfig = settings.getConfig("cpu-percentage")
- val totalProcessTimeConfig = settings.getConfig("total-process-time")
-
- new ProcessCPUMetricsRecorder(
- Histogram.fromConfig(cpuPercentageConfig),
- Histogram.fromConfig(totalProcessTimeConfig))
- }
-}
-
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/SystemMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/SystemMetrics.scala
deleted file mode 100644
index 62ffdb33..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/system/SystemMetrics.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.system
-
-import java.lang.management.ManagementFactory
-
-import akka.actor._
-import akka.event.Logging
-import kamon.Kamon
-import kamon.metric.Metrics
-import kamon.metrics._
-
-import scala.collection.JavaConverters._
-import scala.concurrent.duration._
-
-object SystemMetrics extends ExtensionId[SystemMetricsExtension] with ExtensionIdProvider {
- override def lookup(): ExtensionId[_ <: Extension] = SystemMetrics
-
- override def createExtension(system: ExtendedActorSystem): SystemMetricsExtension = new SystemMetricsExtension(system)
-}
-
-class SystemMetricsExtension(private val system: ExtendedActorSystem) extends Kamon.Extension {
- import kamon.system.SystemMetricsExtension._
-
- val log = Logging(system, classOf[SystemMetricsExtension])
- log.info(s"Starting the Kamon(SystemMetrics) extension")
-
- val systemMetricsExtension = Kamon(Metrics)(system)
-
- //JVM Metrics
- systemMetricsExtension.register(HeapMetrics(Heap), HeapMetrics.Factory)
- garbageCollectors.map { gc ⇒ systemMetricsExtension.register(GCMetrics(gc.getName), GCMetrics.Factory(gc)) }
-
- //System Metrics
- system.actorOf(SystemMetricsCollector.props(1 second), "system-metrics-collector")
-}
-
-object SystemMetricsExtension {
- val CPU = "cpu"
- val ProcessCPU = "process-cpu"
- val Network = "network"
- val Memory = "memory"
- val Heap = "heap"
- val ContextSwitches = "context-switches"
-
- def toKB(value: Long): Long = (value / 1024)
- def toMB(value: Long): Long = (value / 1024 / 1024)
- def toLong(value: Double): Long = math round (value * 100L)
-
- val garbageCollectors = ManagementFactory.getGarbageCollectorMXBeans.asScala.filter(_.isValid)
-}
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsCollector.scala b/kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsCollector.scala
deleted file mode 100644
index f41a76d5..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsCollector.scala
+++ /dev/null
@@ -1,176 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.system
-
-import java.io.IOException
-
-import akka.actor.{ ActorLogging, Actor, Props }
-import kamon.Kamon
-import kamon.metric.Metrics
-import kamon.metrics.CPUMetrics.CPUMetricRecorder
-import kamon.metrics.ContextSwitchesMetrics.ContextSwitchesMetricsRecorder
-import kamon.metrics.MemoryMetrics.MemoryMetricRecorder
-import kamon.metrics.NetworkMetrics.NetworkMetricRecorder
-import kamon.metrics.ProcessCPUMetrics.ProcessCPUMetricsRecorder
-import kamon.metrics._
-import kamon.system.sigar.SigarHolder
-import org.hyperic.sigar.{ Mem, NetInterfaceStat, SigarProxy }
-
-import scala.concurrent.duration.FiniteDuration
-import scala.io.Source
-
-class SystemMetricsCollector(collectInterval: FiniteDuration) extends Actor with ActorLogging with SigarExtensionProvider {
- import kamon.system.SystemMetricsCollector._
- import kamon.system.SystemMetricsExtension._
-
- val collectSchedule = context.system.scheduler.schedule(collectInterval, collectInterval, self, Collect)(context.dispatcher)
-
- val systemMetricsExtension = Kamon(Metrics)(context.system)
-
- val cpuRecorder = systemMetricsExtension.register(CPUMetrics(CPU), CPUMetrics.Factory)
- val processCpuRecorder = systemMetricsExtension.register(ProcessCPUMetrics(ProcessCPU), ProcessCPUMetrics.Factory)
- val memoryRecorder = systemMetricsExtension.register(MemoryMetrics(Memory), MemoryMetrics.Factory)
- val networkRecorder = systemMetricsExtension.register(NetworkMetrics(Network), NetworkMetrics.Factory)
- val contextSwitchesRecorder = systemMetricsExtension.register(ContextSwitchesMetrics(ContextSwitches), ContextSwitchesMetrics.Factory)
-
- def receive: Receive = {
- case Collect ⇒ collectMetrics()
- }
-
- override def postStop() = collectSchedule.cancel()
-
- def collectMetrics() = {
- cpuRecorder.map(recordCpu)
- processCpuRecorder.map(recordProcessCpu)
- memoryRecorder.map(recordMemory)
- networkRecorder.map(recordNetwork)
-
- if (OsUtils.isLinux)
- contextSwitchesRecorder.map(recordContextSwitches)
- }
-
- private def recordCpu(cpur: CPUMetricRecorder) = {
- val cpuPerc = sigar.getCpuPerc
- cpur.user.record(toLong(cpuPerc.getUser))
- cpur.system.record(toLong(cpuPerc.getSys))
- cpur.cpuWait.record(toLong(cpuPerc.getWait))
- cpur.idle.record(toLong(cpuPerc.getIdle))
- }
-
- private def recordProcessCpu(pcpur: ProcessCPUMetricsRecorder) = {
- val procCpu = sigar.getProcCpu(pid)
- val procTime = sigar.getProcTime(pid)
-
- pcpur.cpuPercent.record(toLong(procCpu.getPercent))
- pcpur.totalProcessTime.record(procTime.getTotal) // gives an idea of what is really measured and then interpreted as %
- }
-
- private def recordMemory(mr: MemoryMetricRecorder) = {
- val mem = sigar.getMem
- val swap = sigar.getSwap
-
- mr.used.record(toMB(mem.getUsed))
- mr.free.record(toMB(mem.getFree))
- mr.swapUsed.record(toMB(swap.getUsed))
- mr.swapFree.record(toMB(swap.getFree))
- mr.buffer.record(toMB(collectBuffer(mem)))
- mr.cache.record(toMB(collectCache(mem)))
-
- def collectBuffer(mem: Mem): Long = if (mem.getUsed() != mem.getActualUsed()) mem.getActualUsed() else 0L
- def collectCache(mem: Mem): Long = if (mem.getFree() != mem.getActualFree()) mem.getActualFree() else 0L
- }
-
- private def recordNetwork(nr: NetworkMetricRecorder) = {
- nr.rxBytes.record(collect(sigar, interfaces)(net ⇒ toKB(net.getRxBytes)))
- nr.txBytes.record(collect(sigar, interfaces)(net ⇒ toKB(net.getTxBytes)))
- nr.rxErrors.record(collect(sigar, interfaces)(net ⇒ net.getRxErrors))
- nr.txErrors.record(collect(sigar, interfaces)(net ⇒ net.getTxErrors))
-
- def collect(sigar: SigarProxy, interfaces: Set[String])(block: NetInterfaceStat ⇒ Long): Long = {
- interfaces.foldLeft(0L) { (totalBytes, interface) ⇒
- {
- val net = sigar.getNetInterfaceStat(interface)
- totalBytes + block(net)
- }
- }
- }
- }
-
- private def recordContextSwitches(ctxt: ContextSwitchesMetricsRecorder) = {
- def contextSwitchesByProcess(pid: Long): (Long, Long) = {
- val filename = s"/proc/$pid/status"
- var voluntaryContextSwitches = 0L
- var nonVoluntaryContextSwitches = 0L
-
- try {
- for (line ← Source.fromFile(filename).getLines()) {
- if (line.startsWith("voluntary_ctxt_switches")) {
- voluntaryContextSwitches = line.substring(line.indexOf(":") + 1).trim.toLong
- }
- if (line.startsWith("nonvoluntary_ctxt_switches")) {
- nonVoluntaryContextSwitches = line.substring(line.indexOf(":") + 1).trim.toLong
- }
- }
- } catch {
- case ex: IOException ⇒ {
- log.error("Error trying to read [{}]", filename)
- }
- }
- (voluntaryContextSwitches, nonVoluntaryContextSwitches)
- }
-
- def contextSwitches: Long = {
- val filename = "/proc/stat"
- var contextSwitches = 0L
-
- try {
- for (line ← Source.fromFile(filename).getLines()) {
- if (line.startsWith("ctxt")) {
- contextSwitches = line.substring(line.indexOf(" ") + 1).toLong
- }
- }
- } catch {
- case ex: IOException ⇒ {
- log.error("Error trying to read [{}]", filename)
- }
- }
- contextSwitches
- }
-
- val (perProcessVoluntary, perProcessNonVoluntary) = contextSwitchesByProcess(pid)
- ctxt.perProcessVoluntary.record(perProcessVoluntary)
- ctxt.perProcessNonVoluntary.record(perProcessNonVoluntary)
- ctxt.global.record(contextSwitches)
- }
-}
-
-object SystemMetricsCollector {
- case object Collect
-
- object OsUtils {
- def isLinux: Boolean = System.getProperty("os.name").indexOf("Linux") != -1;
- }
-
- def props(collectInterval: FiniteDuration): Props = Props[SystemMetricsCollector](new SystemMetricsCollector(collectInterval))
-}
-
-trait SigarExtensionProvider {
- lazy val sigar = SigarHolder.instance()
-
- def pid = sigar.getPid
-
- val interfaces: Set[String] = sigar.getNetInterfaceList.toSet
-}
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsExtension.scala b/kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsExtension.scala
new file mode 100644
index 00000000..ebdaf01f
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsExtension.scala
@@ -0,0 +1,71 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+package kamon.system
+
+import java.io.File
+import akka.actor._
+import akka.event.Logging
+import kamon.supervisor.ModuleSupervisor
+import kamon.system.custom.{ ContextSwitchesUpdater, ContextSwitchesMetrics }
+import kamon.system.jmx._
+import kamon.Kamon
+import kamon.metric._
+import kamon.sigar.SigarProvisioner
+import kamon.system.sigar.SigarMetricsUpdater
+
+import kamon.util.ConfigTools.Syntax
+
+object SystemMetrics extends ExtensionId[SystemMetricsExtension] with ExtensionIdProvider {
+ override def lookup(): ExtensionId[_ <: Extension] = SystemMetrics
+ override def createExtension(system: ExtendedActorSystem): SystemMetricsExtension = new SystemMetricsExtension(system)
+}
+
+class SystemMetricsExtension(system: ExtendedActorSystem) extends Kamon.Extension {
+
+ val log = Logging(system, classOf[SystemMetricsExtension])
+ log.info(s"Starting the Kamon(SystemMetrics) extension")
+
+ val config = system.settings.config.getConfig("kamon.system-metrics")
+ val sigarFolder = config.getString("sigar-native-folder")
+ val sigarRefreshInterval = config.getFiniteDuration("sigar-metrics-refresh-interval")
+ val contextSwitchesRefreshInterval = config.getFiniteDuration("context-switches-refresh-interval")
+ val metricsExtension = Kamon.metrics
+
+ // Sigar-based metrics
+ SigarProvisioner.provision(new File(sigarFolder))
+ val sigarMetricsRecorder = ModuleSupervisor.get(system).createModule("sigar-metrics-recorder",
+ SigarMetricsUpdater.props(sigarRefreshInterval).withDispatcher("kamon.system-metrics.sigar-dispatcher"))
+
+ // JMX Metrics
+ ClassLoadingMetrics.register(metricsExtension)
+ GarbageCollectionMetrics.register(metricsExtension)
+ HeapMemoryMetrics.register(metricsExtension)
+ NonHeapMemoryMetrics.register(metricsExtension)
+ ThreadsMetrics.register(metricsExtension)
+
+  // If we are on Linux, register the ContextSwitchesMetrics recorder as well.
+ if (isLinux) {
+ val contextSwitchesRecorder = ContextSwitchesMetrics.register(system, contextSwitchesRefreshInterval)
+
+ ModuleSupervisor.get(system).createModule("context-switches-metrics-recorder",
+ ContextSwitchesUpdater.props(contextSwitchesRecorder, sigarRefreshInterval)
+ .withDispatcher("kamon.system-metrics.context-switches-dispatcher"))
+ }
+
+ def isLinux: Boolean =
+ System.getProperty("os.name").indexOf("Linux") != -1
+
+} \ No newline at end of file
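For reference, every configuration key this extension reads lives under kamon.system-metrics. A minimal sketch of that block (the key names come straight from the code above; the values and dispatcher definitions are illustrative, not the module's shipped defaults):

    kamon.system-metrics {
      # folder where the Sigar native library gets provisioned (illustrative value)
      sigar-native-folder = "./native"

      sigar-metrics-refresh-interval = 100 millis
      context-switches-refresh-interval = 100 millis

      # dispatchers referenced via .withDispatcher(...) above; illustrative definitions
      sigar-dispatcher {
        type = PinnedDispatcher
        executor = "thread-pool-executor"
      }
      context-switches-dispatcher {
        type = PinnedDispatcher
        executor = "thread-pool-executor"
      }
    }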
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/custom/ContextSwitchesMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/custom/ContextSwitchesMetrics.scala
new file mode 100644
index 00000000..384c89f1
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/custom/ContextSwitchesMetrics.scala
@@ -0,0 +1,118 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.system.custom
+
+import java.io.IOException
+import java.nio.charset.StandardCharsets
+import java.nio.file.{ Paths, Files }
+
+import akka.actor.{ Props, Actor, ActorSystem }
+import akka.event.{ Logging, LoggingAdapter }
+import kamon.Kamon
+import kamon.metric._
+import kamon.metric.instrument.InstrumentFactory
+import kamon.system.custom.ContextSwitchesUpdater.UpdateContextSwitches
+import org.hyperic.sigar.Sigar
+import scala.collection.JavaConverters.iterableAsScalaIterableConverter
+import scala.concurrent.duration.FiniteDuration
+
+/**
+ * Context Switches metrics:
+ *  - process-voluntary: Total number of voluntary context switches related to the current process (one thread explicitly yields the CPU to another).
+ *  - process-non-voluntary: Total number of involuntary context switches related to the current process (the system scheduler suspends an active thread and switches control to a different thread).
+ * - global: Total number of context switches across all CPUs.
+ */
+class ContextSwitchesMetrics(pid: Long, log: LoggingAdapter, instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val perProcessVoluntary = histogram("context-switches-process-voluntary")
+ val perProcessNonVoluntary = histogram("context-switches-process-non-voluntary")
+ val global = histogram("context-switches-global")
+
+ def update(): Unit = {
+ def contextSwitchesByProcess(pid: Long): (Long, Long) = {
+ val filename = s"/proc/$pid/status"
+ var voluntaryContextSwitches = 0L
+ var nonVoluntaryContextSwitches = 0L
+
+ try {
+ for (line ← Files.readAllLines(Paths.get(filename), StandardCharsets.US_ASCII).asScala.toList) {
+ if (line.startsWith("voluntary_ctxt_switches")) {
+ voluntaryContextSwitches = line.substring(line.indexOf(":") + 1).trim.toLong
+ }
+ if (line.startsWith("nonvoluntary_ctxt_switches")) {
+ nonVoluntaryContextSwitches = line.substring(line.indexOf(":") + 1).trim.toLong
+ }
+ }
+ } catch {
+ case ex: IOException ⇒ log.error("Error trying to read [{}]", filename)
+ }
+ (voluntaryContextSwitches, nonVoluntaryContextSwitches)
+ }
+
+ def contextSwitches: Long = {
+ val filename = "/proc/stat"
+ var contextSwitches = 0L
+
+ try {
+ for (line ← Files.readAllLines(Paths.get(filename), StandardCharsets.US_ASCII).asScala.toList) {
+ if (line.startsWith("ctxt")) {
+ contextSwitches = line.substring(line.indexOf(" ") + 1).toLong
+ }
+ }
+ } catch {
+ case ex: IOException ⇒ log.error("Error trying to read [{}]", filename)
+ }
+ contextSwitches
+ }
+
+ val (voluntary, nonVoluntary) = contextSwitchesByProcess(pid)
+ perProcessVoluntary.record(voluntary)
+ perProcessNonVoluntary.record(nonVoluntary)
+ global.record(contextSwitches)
+ }
+}
+
+object ContextSwitchesMetrics {
+
+ def register(system: ActorSystem, refreshInterval: FiniteDuration): ContextSwitchesMetrics = {
+ val metricsExtension = Kamon.metrics
+ val log = Logging(system, "ContextSwitchesMetrics")
+ val pid = (new Sigar).getPid
+
+ val instrumentFactory = metricsExtension.instrumentFactory("system-metric")
+ metricsExtension.register(Entity("context-switches", "system-metric"), new ContextSwitchesMetrics(pid, log, instrumentFactory)).recorder
+ }
+}
+
+class ContextSwitchesUpdater(csm: ContextSwitchesMetrics, refreshInterval: FiniteDuration) extends Actor {
+ val schedule = context.system.scheduler.schedule(refreshInterval, refreshInterval, self, UpdateContextSwitches)(context.dispatcher)
+
+ def receive = {
+ case UpdateContextSwitches ⇒ csm.update()
+ }
+
+ override def postStop(): Unit = {
+ schedule.cancel()
+ super.postStop()
+ }
+}
+
+object ContextSwitchesUpdater {
+ case object UpdateContextSwitches
+
+ def props(csm: ContextSwitchesMetrics, refreshInterval: FiniteDuration): Props =
+ Props(new ContextSwitchesUpdater(csm, refreshInterval))
+} \ No newline at end of file
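The update() method above keys off a handful of lines from the Linux proc filesystem; with illustrative counter values, the relevant input looks like this:

    # /proc/<pid>/status — per-process counters
    voluntary_ctxt_switches:        5184
    nonvoluntary_ctxt_switches:     172

    # /proc/stat — system-wide counter
    ctxt 98123457

Note that the parsed values are recorded as-is on every refresh, so these histograms hold cumulative counts rather than per-interval deltas.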
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/jmx/ClassLoadingMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/jmx/ClassLoadingMetrics.scala
new file mode 100644
index 00000000..568f1b71
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/jmx/ClassLoadingMetrics.scala
@@ -0,0 +1,48 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.system.jmx
+
+import java.lang.management.ManagementFactory
+
+import kamon.metric.GenericEntityRecorder
+import kamon.metric.instrument.InstrumentFactory
+
+/**
+ * Class Loading metrics, as reported by JMX:
+ * - @see [[http://docs.oracle.com/javase/7/docs/api/java/lang/management/ClassLoadingMXBean.html "ClassLoadingMXBean"]]
+ */
+class ClassLoadingMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val classLoadingBean = ManagementFactory.getClassLoadingMXBean
+
+ gauge("classes-loaded", Memory.Bytes, () ⇒ {
+ classLoadingBean.getTotalLoadedClassCount
+ })
+
+ gauge("classes-unloaded", Memory.Bytes, () ⇒ {
+ classLoadingBean.getUnloadedClassCount
+ })
+
+ gauge("classes-currently-loaded", Memory.Bytes, () ⇒ {
+ classLoadingBean.getLoadedClassCount.toLong
+ })
+
+}
+
+object ClassLoadingMetrics extends JmxSystemMetricRecorderCompanion("class-loading") {
+ def apply(instrumentFactory: InstrumentFactory): ClassLoadingMetrics =
+ new ClassLoadingMetrics(instrumentFactory)
+}
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/jmx/GarbageCollectionMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/jmx/GarbageCollectionMetrics.scala
new file mode 100644
index 00000000..a9ab4b62
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/jmx/GarbageCollectionMetrics.scala
@@ -0,0 +1,54 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.system.jmx
+
+import java.lang.management.{ GarbageCollectorMXBean, ManagementFactory }
+
+import kamon.metric.{ Entity, MetricsExtension, GenericEntityRecorder }
+import kamon.metric.instrument.{ DifferentialValueCollector, Time, InstrumentFactory }
+import scala.collection.JavaConverters._
+
+/**
+ * Garbage Collection metrics, as reported by JMX:
+ * - @see [[http://docs.oracle.com/javase/7/docs/api/java/lang/management/GarbageCollectorMXBean.html "GarbageCollectorMXBean"]]
+ */
+class GarbageCollectionMetrics(gc: GarbageCollectorMXBean, instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+
+ gauge("garbage-collection-count", DifferentialValueCollector(() ⇒ {
+ gc.getCollectionCount
+ }))
+
+ gauge("garbage-collection-time", Time.Milliseconds, DifferentialValueCollector(() ⇒ {
+ gc.getCollectionTime
+ }))
+
+}
+
+object GarbageCollectionMetrics {
+
+ def sanitizeCollectorName(name: String): String =
+ name.replaceAll("""[^\w]""", "-").toLowerCase
+
+ def register(metricsExtension: MetricsExtension): Unit = {
+
+ val instrumentFactory = metricsExtension.instrumentFactory("system-metric")
+    ManagementFactory.getGarbageCollectorMXBeans.asScala.filter(_.isValid) foreach { gc ⇒
+ val gcName = sanitizeCollectorName(gc.getName)
+ metricsExtension.register(Entity(s"$gcName-garbage-collector", "system-metric"), new GarbageCollectionMetrics(gc, instrumentFactory))
+ }
+ }
+}
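Both JMX counters above are cumulative since JVM start; DifferentialValueCollector is what turns them into per-interval figures, the same idea as the DiffRecordingHistogram introduced further down in this change. A sketch of the expected behavior, assuming the collector reports the difference between consecutive samples:

    // Illustrative samples of gc.getCollectionCount across three refresh ticks:
    //   tick 1: bean reports 10  -> baseline established
    //   tick 2: bean reports 13  -> gauge records 3 collections for the interval
    //   tick 3: bean reports 13  -> gauge records 0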
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/jmx/HeapMemoryMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/jmx/HeapMemoryMetrics.scala
new file mode 100644
index 00000000..cd2e3e8e
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/jmx/HeapMemoryMetrics.scala
@@ -0,0 +1,49 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.system.jmx
+
+import java.lang.management.ManagementFactory
+
+import kamon.metric.GenericEntityRecorder
+import kamon.metric.instrument.{ Memory, InstrumentFactory }
+
+/**
+ * Heap Memory metrics, as reported by JMX:
+ * - @see [[http://docs.oracle.com/javase/7/docs/api/java/lang/management/MemoryMXBean.html "MemoryMXBean"]]
+ */
+class HeapMemoryMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val memoryBean = ManagementFactory.getMemoryMXBean
+  def heapUsage = memoryBean.getHeapMemoryUsage
+
+  gauge("heap-used", Memory.Bytes, () ⇒ {
+    heapUsage.getUsed
+  })
+
+  gauge("heap-max", Memory.Bytes, () ⇒ {
+    heapUsage.getMax
+  })
+
+  gauge("heap-committed", Memory.Bytes, () ⇒ {
+    heapUsage.getCommitted
+  })
+
+}
+
+object HeapMemoryMetrics extends JmxSystemMetricRecorderCompanion("heap-memory") {
+ def apply(instrumentFactory: InstrumentFactory): HeapMemoryMetrics =
+ new HeapMemoryMetrics(instrumentFactory)
+}
diff --git a/kamon-core/src/main/scala/kamon/metric/package.scala b/kamon-system-metrics/src/main/scala/kamon/system/jmx/JmxSystemMetricRecorderCompanion.scala
index 43166058..be0ee08c 100644
--- a/kamon-core/src/main/scala/kamon/metric/package.scala
+++ b/kamon-system-metrics/src/main/scala/kamon/system/jmx/JmxSystemMetricRecorderCompanion.scala
@@ -1,6 +1,6 @@
/*
* =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
@@ -14,21 +14,16 @@
* =========================================================================================
*/
-package kamon
+package kamon.system.jmx
-import scala.annotation.tailrec
-import com.typesafe.config.Config
+import kamon.metric.instrument.InstrumentFactory
+import kamon.metric.{ Entity, EntityRecorder, MetricsExtension }
-package object metric {
-
- @tailrec def combineMaps[K, V](left: Map[K, V], right: Map[K, V])(valueMerger: (V, V) ⇒ V): Map[K, V] = {
- if (right.isEmpty)
- left
- else {
- val (key, rightValue) = right.head
- val value = left.get(key).map(valueMerger(_, rightValue)).getOrElse(rightValue)
-
- combineMaps(left.updated(key, value), right.tail)(valueMerger)
- }
+abstract class JmxSystemMetricRecorderCompanion(metricName: String) {
+ def register(metricsExtension: MetricsExtension): EntityRecorder = {
+ val instrumentFactory = metricsExtension.instrumentFactory("system-metric")
+ metricsExtension.register(Entity(metricName, "system-metric"), apply(instrumentFactory)).recorder
}
-}
+
+ def apply(instrumentFactory: InstrumentFactory): EntityRecorder
+} \ No newline at end of file
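To illustrate how the companion is meant to be used, here is a hypothetical recorder (not part of this change) built on the standard CompilationMXBean; a new JMX-backed entity only needs to supply the apply factory, and register takes care of the Entity naming and registration:

    package kamon.system.jmx

    import java.lang.management.ManagementFactory

    import kamon.metric.GenericEntityRecorder
    import kamon.metric.instrument.InstrumentFactory

    // Hypothetical example following the pattern above.
    class CompilationMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
      // May be null on a JVM without a JIT compiler; a real recorder would guard against that.
      val compilationBean = ManagementFactory.getCompilationMXBean

      gauge("total-compilation-time", () ⇒ {
        compilationBean.getTotalCompilationTime
      })
    }

    object CompilationMetrics extends JmxSystemMetricRecorderCompanion("compilation") {
      def apply(instrumentFactory: InstrumentFactory): CompilationMetrics =
        new CompilationMetrics(instrumentFactory)
    }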
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/jmx/NonHeapMemoryMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/jmx/NonHeapMemoryMetrics.scala
new file mode 100644
index 00000000..7425972b
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/jmx/NonHeapMemoryMetrics.scala
@@ -0,0 +1,53 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.system.jmx
+
+import java.lang.management.ManagementFactory
+
+import kamon.metric.GenericEntityRecorder
+import kamon.metric.instrument.{ Memory, InstrumentFactory }
+
+/**
+ * Non Heap Memory metrics, as reported by JMX:
+ * - @see [[http://docs.oracle.com/javase/7/docs/api/java/lang/management/MemoryMXBean.html "MemoryMXBean"]]
+ */
+class NonHeapMemoryMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val memoryBean = ManagementFactory.getMemoryMXBean
+ def nonHeapUsage = memoryBean.getNonHeapMemoryUsage
+
+ gauge("non-heap-used", Memory.Bytes, () ⇒ {
+ nonHeapUsage.getUsed
+ })
+
+ gauge("non-heap-max", Memory.Bytes, () ⇒ {
+ val max = nonHeapUsage.getMax
+
+ // .getMax can return -1 if the max is not defined.
+ if (max >= 0) max
+ else 0
+ })
+
+ gauge("non-heap-committed", Memory.Bytes, () ⇒ {
+ nonHeapUsage.getCommitted
+ })
+
+}
+
+object NonHeapMemoryMetrics extends JmxSystemMetricRecorderCompanion("non-heap-memory") {
+ def apply(instrumentFactory: InstrumentFactory): NonHeapMemoryMetrics =
+ new NonHeapMemoryMetrics(instrumentFactory)
+}
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/jmx/ThreadsMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/jmx/ThreadsMetrics.scala
new file mode 100644
index 00000000..b9bf9622
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/jmx/ThreadsMetrics.scala
@@ -0,0 +1,48 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.system.jmx
+
+import java.lang.management.ManagementFactory
+
+import kamon.metric.GenericEntityRecorder
+import kamon.metric.instrument.InstrumentFactory
+
+/**
+ * Threads metrics, as reported by JMX:
+ * - @see [[http://docs.oracle.com/javase/7/docs/api/java/lang/management/ThreadMXBean.html "ThreadMXBean"]]
+ */
+class ThreadsMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val threadsBean = ManagementFactory.getThreadMXBean
+
+ gauge("daemon-thread-count", () ⇒ {
+ threadsBean.getDaemonThreadCount.toLong
+ })
+
+ gauge("peak-thread-count", () ⇒ {
+ threadsBean.getPeakThreadCount.toLong
+ })
+
+ gauge("thread-count", () ⇒ {
+ threadsBean.getThreadCount.toLong
+ })
+
+}
+
+object ThreadsMetrics extends JmxSystemMetricRecorderCompanion("threads") {
+ def apply(instrumentFactory: InstrumentFactory): ThreadsMetrics =
+ new ThreadsMetrics(instrumentFactory)
+}
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/CpuMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/sigar/CpuMetrics.scala
new file mode 100644
index 00000000..0e9a5b53
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/CpuMetrics.scala
@@ -0,0 +1,53 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.system.sigar
+
+import kamon.metric.GenericEntityRecorder
+import kamon.metric.instrument.InstrumentFactory
+import org.hyperic.sigar.Sigar
+
+/**
+ * Cpu usage metrics, as reported by Sigar:
+ * - user: Total percentage of system cpu user time.
+ * - system: Total percentage of system cpu kernel time.
+ * - wait: Total percentage of system cpu io wait time.
+ *  - idle: Total percentage of system cpu idle time.
+ * - stolen: Total percentage of system cpu involuntary wait time. @see [[https://www.datadoghq.com/2013/08/understanding-aws-stolen-cpu-and-how-it-affects-your-apps/ "Understanding Stolen Cpu"]]
+ */
+class CpuMetrics(sigar: Sigar, instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) with SigarMetric {
+ val user = histogram("cpu-user")
+ val system = histogram("cpu-system")
+  val cpuWait = histogram("cpu-wait") // "wait" would clash with Object#wait, hence the different name
+ val idle = histogram("cpu-idle")
+ val stolen = histogram("cpu-stolen")
+
+ def update(): Unit = {
+ val cpuPerc = sigar.getCpuPerc
+
+ user.record((cpuPerc.getUser * 100L).toLong)
+ system.record((cpuPerc.getSys * 100L).toLong)
+    cpuWait.record((cpuPerc.getWait * 100L).toLong)
+ idle.record((cpuPerc.getIdle * 100L).toLong)
+ stolen.record((cpuPerc.getStolen * 100L).toLong)
+ }
+}
+
+object CpuMetrics extends SigarMetricRecorderCompanion("cpu") {
+
+ def apply(sigar: Sigar, instrumentFactory: InstrumentFactory): CpuMetrics =
+ new CpuMetrics(sigar, instrumentFactory)
+}
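Sigar's CpuPerc getters return fractions in the [0, 1] range, so multiplying by 100 stores whole percentage points in the histograms above. Illustrative values:

    // cpuPerc.getUser = 0.85   =>  user.record(85)      (85% user time)
    // cpuPerc.getWait = 0.004  =>  cpuWait.record(0)    (sub-percent readings truncate to zero)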
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/DiffRecordingHistogram.scala b/kamon-system-metrics/src/main/scala/kamon/system/sigar/DiffRecordingHistogram.scala
new file mode 100644
index 00000000..06e3e37d
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/DiffRecordingHistogram.scala
@@ -0,0 +1,60 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.system.sigar
+
+import java.util.concurrent.atomic.AtomicLong
+
+import kamon.metric.instrument.{ CollectionContext, Histogram }
+
+/**
+ * Wrapper Histogram for cases in which the recorded values should always be the difference
+ * between the current value and the last recorded value. This is not thread-safe and should
+ * only be used with Sigar-based metrics that are updated sequentially from within an actor.
+ */
+class DiffRecordingHistogram(wrappedHistogram: Histogram) extends Histogram {
+ @volatile private var _recordedAtLeastOnce = false
+ private val _lastObservedValue = new AtomicLong(0)
+
+ private def processRecording(value: Long, count: Long): Unit = {
+ if (_recordedAtLeastOnce) {
+ val diff = value - _lastObservedValue.getAndSet(value)
+ val current = if (diff >= 0) diff else 0L
+
+ wrappedHistogram.record(current, count)
+ } else {
+ _lastObservedValue.set(value)
+ _recordedAtLeastOnce = true
+ }
+ }
+
+ def record(value: Long): Unit =
+ processRecording(value, 1)
+
+ def record(value: Long, count: Long): Unit =
+ processRecording(value, count)
+
+ def cleanup: Unit =
+ wrappedHistogram.cleanup
+
+ def collect(context: CollectionContext): Histogram.Snapshot =
+ wrappedHistogram.collect(context)
+}
+
+object DiffRecordingHistogram {
+ def apply(histogram: Histogram): DiffRecordingHistogram =
+ new DiffRecordingHistogram(histogram)
+} \ No newline at end of file
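A short sketch of the recording semantics, with hypothetical values:

    import kamon.metric.instrument.Histogram

    def example(wrapped: Histogram): Unit = {
      val diff = DiffRecordingHistogram(wrapped)
      diff.record(1000L) // the first observation only seeds the baseline; nothing reaches the wrapped histogram
      diff.record(1500L) // records 500 (1500 - 1000)
      diff.record(1400L) // a negative delta (e.g. after a counter reset) is clamped to 0
    }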
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/FileSystemMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/sigar/FileSystemMetrics.scala
new file mode 100644
index 00000000..d3bfefbe
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/FileSystemMetrics.scala
@@ -0,0 +1,48 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.system.sigar
+
+import kamon.metric.GenericEntityRecorder
+import kamon.metric.instrument.{ Memory, InstrumentFactory }
+import org.hyperic.sigar.{ DiskUsage, FileSystem, Sigar }
+import scala.util.Try
+
+/**
+ * Disk usage metrics, as reported by Sigar:
+ *  - reads: Total bytes read from the local physical disks.
+ *  - writes: Total bytes written to the local physical disks.
+ */
+class FileSystemMetrics(sigar: Sigar, instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) with SigarMetric {
+ val reads = DiffRecordingHistogram(histogram("file-system-reads", Memory.Bytes))
+ val writes = DiffRecordingHistogram(histogram("file-system-writes", Memory.Bytes))
+
+ val fileSystems = sigar.getFileSystemList.filter(_.getType == FileSystem.TYPE_LOCAL_DISK).map(_.getDevName).toSet
+
+ def sumOfAllFileSystems(sigar: Sigar, thunk: DiskUsage ⇒ Long): Long = Try {
+ fileSystems.map(i ⇒ thunk(sigar.getDiskUsage(i))).fold(0L)(_ + _)
+ } getOrElse 0L
+
+ def update(): Unit = {
+ reads.record(sumOfAllFileSystems(sigar, _.getReadBytes))
+ writes.record(sumOfAllFileSystems(sigar, _.getWriteBytes))
+ }
+}
+
+object FileSystemMetrics extends SigarMetricRecorderCompanion("file-system") {
+ def apply(sigar: Sigar, instrumentFactory: InstrumentFactory): FileSystemMetrics =
+ new FileSystemMetrics(sigar, instrumentFactory)
+}
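sumOfAllFileSystems folds the per-device counters into a single total and collapses any Sigar failure into 0 rather than breaking the collection tick. With a hypothetical device set, an update() effectively computes:

    // fileSystems = Set("/dev/sda1", "/dev/sdb1")   (illustrative)
    // reads.record(Try(sigar.getDiskUsage("/dev/sda1").getReadBytes +
    //                  sigar.getDiskUsage("/dev/sdb1").getReadBytes).getOrElse(0L))
    // ...and since reads is a DiffRecordingHistogram, only the delta since the previous tick is stored.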
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/LoadAverageMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/sigar/LoadAverageMetrics.scala
new file mode 100644
index 00000000..8d7bd808
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/LoadAverageMetrics.scala
@@ -0,0 +1,45 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.system.sigar
+
+import kamon.metric.GenericEntityRecorder
+import kamon.metric.instrument.InstrumentFactory
+import org.hyperic.sigar.Sigar
+
+/**
+ * Load Average metrics, as reported by Sigar:
+ * - The system load averages for the past 1, 5, and 15 minutes.
+ */
+class LoadAverageMetrics(sigar: Sigar, instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) with SigarMetric {
+ val oneMinute = histogram("one-minute")
+ val fiveMinutes = histogram("five-minutes")
+ val fifteenMinutes = histogram("fifteen-minutes")
+
+ def update(): Unit = {
+ val loadAverage = sigar.getLoadAverage
+
+ oneMinute.record(loadAverage(0).toLong)
+ fiveMinutes.record(loadAverage(1).toLong)
+ fifteenMinutes.record(loadAverage(2).toLong)
+ }
+}
+
+object LoadAverageMetrics extends SigarMetricRecorderCompanion("load-average") {
+
+ def apply(sigar: Sigar, instrumentFactory: InstrumentFactory): LoadAverageMetrics =
+ new LoadAverageMetrics(sigar, instrumentFactory)
+}
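Note that sigar.getLoadAverage returns an Array[Double] and .toLong truncates the fractional part, so sub-1 loads record as zero. Illustrative values:

    // sigar.getLoadAverage = Array(0.75, 1.20, 2.95)
    // oneMinute.record(0)         (0.75 truncates to 0)
    // fiveMinutes.record(1)       (1.20 truncates to 1)
    // fifteenMinutes.record(2)    (2.95 truncates to 2)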
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/MemoryMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/sigar/MemoryMetrics.scala
new file mode 100644
index 00000000..787c9f2f
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/MemoryMetrics.scala
@@ -0,0 +1,57 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.system.sigar
+
+import kamon.metric.GenericEntityRecorder
+import kamon.metric.instrument.{ Memory, InstrumentFactory }
+import org.hyperic.sigar.Sigar
+
+/**
+ * System memory usage metrics, as reported by Sigar:
+ *  - used: Total used system memory.
+ *  - cache-and-buffer: Total memory used for caching and buffers (where the OS reports it, e.g. on Linux).
+ *  - free / total: Total free system memory and total system memory.
+ *  - swap-used / swap-free: Total used and free system swap.
+ */
+class MemoryMetrics(sigar: Sigar, instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) with SigarMetric {
+ val used = histogram("memory-used", Memory.Bytes)
+ val cached = histogram("memory-cache-and-buffer", Memory.Bytes)
+ val free = histogram("memory-free", Memory.Bytes)
+ val total = histogram("memory-total", Memory.Bytes)
+ val swapUsed = histogram("swap-used", Memory.Bytes)
+ val swapFree = histogram("swap-free", Memory.Bytes)
+
+ def update(): Unit = {
+ val mem = sigar.getMem
+ val swap = sigar.getSwap
+ val cachedMemory = if (mem.getActualFree > mem.getFree) mem.getActualFree - mem.getFree else 0L
+
+ used.record(mem.getActualUsed)
+ free.record(mem.getActualFree)
+ cached.record(cachedMemory)
+ total.record(mem.getTotal)
+ swapUsed.record(swap.getUsed)
+ swapFree.record(swap.getFree)
+ }
+}
+
+object MemoryMetrics extends SigarMetricRecorderCompanion("memory") {
+
+ def apply(sigar: Sigar, instrumentFactory: InstrumentFactory): MemoryMetrics =
+ new MemoryMetrics(sigar, instrumentFactory)
+}
+
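The cache-and-buffer figure is derived from the gap between Sigar's raw and "actual" free memory. A worked example with illustrative values:

    // mem.getFree       =  500 MB   (free as the kernel reports it)
    // mem.getActualFree = 2500 MB   (free plus reclaimable cache and buffers)
    // cachedMemory      = 2500 - 500 = 2000 MB, recorded under "memory-cache-and-buffer"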
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/NetworkMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/sigar/NetworkMetrics.scala
new file mode 100644
index 00000000..30575508
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/NetworkMetrics.scala
@@ -0,0 +1,61 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.system.sigar
+
+import kamon.metric.GenericEntityRecorder
+import kamon.metric.instrument._
+import org.hyperic.sigar.{ NetInterfaceStat, Sigar }
+import scala.util.Try
+
+/**
+ * Network metrics, as reported by Sigar:
+ *  - rxBytes: Total bytes received across all non-loopback interfaces.
+ *  - txBytes: Total bytes transmitted across all non-loopback interfaces.
+ *  - rxErrors: Total number of packets received with errors. This includes too-long-frames errors, ring-buffer overflow errors, etc.
+ *  - txErrors: Total number of errors encountered while transmitting packets. This includes errors due to the transmission being aborted, errors due to the carrier, etc.
+ * - rxDropped: Total number of incoming packets dropped.
+ * - txDropped: Total number of outgoing packets dropped.
+ */
+class NetworkMetrics(sigar: Sigar, instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) with SigarMetric {
+ val receivedBytes = DiffRecordingHistogram(histogram("rx-bytes", Memory.Bytes))
+ val transmittedBytes = DiffRecordingHistogram(histogram("tx-bytes", Memory.Bytes))
+ val receiveErrors = DiffRecordingHistogram(histogram("rx-errors"))
+ val transmitErrors = DiffRecordingHistogram(histogram("tx-errors"))
+ val receiveDrops = DiffRecordingHistogram(histogram("rx-dropped"))
+ val transmitDrops = DiffRecordingHistogram(histogram("tx-dropped"))
+
+ val interfaces = sigar.getNetInterfaceList.toList.filter(_ != "lo")
+
+ def sumOfAllInterfaces(sigar: Sigar, thunk: NetInterfaceStat ⇒ Long): Long = Try {
+ interfaces.map(i ⇒ thunk(sigar.getNetInterfaceStat(i))).fold(0L)(_ + _)
+ } getOrElse 0L
+
+ def update(): Unit = {
+ receivedBytes.record(sumOfAllInterfaces(sigar, _.getRxBytes))
+ transmittedBytes.record(sumOfAllInterfaces(sigar, _.getTxBytes))
+ receiveErrors.record(sumOfAllInterfaces(sigar, _.getRxErrors))
+ transmitErrors.record(sumOfAllInterfaces(sigar, _.getTxErrors))
+ receiveDrops.record(sumOfAllInterfaces(sigar, _.getRxDropped))
+ transmitDrops.record(sumOfAllInterfaces(sigar, _.getTxDropped))
+ }
+}
+
+object NetworkMetrics extends SigarMetricRecorderCompanion("network") {
+ def apply(sigar: Sigar, instrumentFactory: InstrumentFactory): NetworkMetrics =
+ new NetworkMetrics(sigar, instrumentFactory)
+} \ No newline at end of file
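
The rx/tx counters Sigar exposes are cumulative since boot, so the histograms above are wrapped in DiffRecordingHistogram, which (as used here) records only the delta between consecutive observations. A minimal sketch of that idea, independent of Kamon's actual implementation:

    // Hypothetical stand-in illustrating delta recording over a cumulative counter.
    class DeltaRecorder(record: Long ⇒ Unit) {
      private var last: Option[Long] = None
      def observe(cumulative: Long): Unit = {
        last.foreach(prev ⇒ record(math.max(cumulative - prev, 0L)))
        last = Some(cumulative)
      }
    }

    val deltas = scala.collection.mutable.Buffer.empty[Long]
    val rxBytes = new DeltaRecorder(deltas += _)
    Seq(1000L, 1500L, 1500L, 2100L).foreach(rxBytes.observe)
    // deltas == Buffer(500, 0, 600)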
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/ProcessCpuMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/sigar/ProcessCpuMetrics.scala
new file mode 100644
index 00000000..4432e6cd
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/ProcessCpuMetrics.scala
@@ -0,0 +1,77 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.system.sigar
+
+import kamon.metric.GenericEntityRecorder
+import kamon.metric.instrument.InstrumentFactory
+import org.hyperic.sigar.{ ProcCpu, Sigar }
+
+/**
+ * Process CPU usage metrics, as reported by Sigar:
+ * - user: Process CPU user time.
+ * - system: Process CPU kernel time.
+ * - total: Process CPU time (sum of user and system).
+ */
+class ProcessCpuMetrics(sigar: Sigar, instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) with SigarMetric {
+ val processUserCpu = histogram("process-user-cpu")
+ val processSystemCpu = histogram("process-system-cpu")
+ val processTotalCpu = histogram("process-cpu")
+
+ val pid = sigar.getPid
+ val totalCores = sigar.getCpuInfoList.headOption.map(_.getTotalCores.toLong).getOrElse(1L)
+
+ var lastProcCpu: ProcCpu = sigar.getProcCpu(pid)
+ var currentLoad: Long = 0
+
+ /**
+ * Sigar updates the process CPU usage times only every so often, so a simple heuristic is applied: while the reported
+ * CPU time does not change, the load is assumed to be the same as in the previous measurement. Since a process might
+ * legitimately be at zero load for a while, after an arbitrary period of 2000 milliseconds an unchanged CPU time
+ * value is accepted as-is, and the load is then considered to really be zero.
+ *
+ * @see [[http://stackoverflow.com/questions/19323364/using-sigar-api-to-get-jvm-cpu-usage "StackOverflow: Using Sigar API to get JVM Cpu usage"]]
+ */
+ def update(): Unit = {
+ val currentProcCpu = sigar.getProcCpu(pid)
+ val totalDiff = currentProcCpu.getTotal - lastProcCpu.getTotal
+ val userDiff = currentProcCpu.getUser - lastProcCpu.getUser
+ val systemDiff = currentProcCpu.getSys - lastProcCpu.getSys
+ val timeDiff = currentProcCpu.getLastTime - lastProcCpu.getLastTime
+
+ def percentUsage(delta: Long): Long = 100 * delta / timeDiff / totalCores
+
+ if (totalDiff == 0) {
+ if (timeDiff > 2000) currentLoad = 0
+ if (currentLoad == 0) lastProcCpu = currentProcCpu
+ } else {
+ val totalPercent = percentUsage(totalDiff)
+ val userPercent = percentUsage(userDiff)
+ val systemPercent = percentUsage(systemDiff)
+
+ processUserCpu.record(userPercent)
+ processSystemCpu.record(systemPercent)
+ processTotalCpu.record(userPercent + systemPercent)
+
+ currentLoad = totalPercent
+ lastProcCpu = currentProcCpu
+ }
+ }
+}
+
+object ProcessCpuMetrics extends SigarMetricRecorderCompanion("process-cpu") {
+ def apply(sigar: Sigar, instrumentFactory: InstrumentFactory): ProcessCpuMetrics =
+ new ProcessCpuMetrics(sigar, instrumentFactory)
+}
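To make the percentUsage arithmetic concrete: Sigar reports process CPU times in milliseconds, so on a 4-core machine where the process consumed 600 ms of CPU over a 1000 ms wall-clock window, the recorded load is 100 * 600 / 1000 / 4 = 15, i.e. percent of total machine capacity. A throwaway check of the same formula, with illustrative numbers that are not from the codebase:

    val timeDiffMs = 1000L // wall-clock time between the two ProcCpu samples
    val totalCores = 4L
    def percentUsage(delta: Long): Long = 100 * delta / timeDiffMs / totalCores
    assert(percentUsage(600L) == 15L) // 600 ms of CPU across 4 cores over 1 s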
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/SigarLoader.scala b/kamon-system-metrics/src/main/scala/kamon/system/sigar/SigarLoader.scala
deleted file mode 100644
index 607ebe13..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/system/sigar/SigarLoader.scala
+++ /dev/null
@@ -1,203 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.system.sigar
-
-import java.io._
-import java.text.SimpleDateFormat
-import java.util
-import java.util.logging.Logger
-import java.util.{ ArrayList, Date, List }
-
-import org.hyperic.sigar._
-
-import scala.annotation.tailrec
-import scala.collection.JavaConversions._
-import scala.io.Source
-
-object SigarHolder {
- private lazy val sigarProxy = SigarLoader.sigarProxy
- def instance() = sigarProxy
-}
-
-object SigarLoader {
-
- val Version = "1.6.4"
- val JavaLibraryPath = "java.library.path"
- val TmpDir = "java.io.tmpdir"
- val IndexFile = "/kamon/system/sigar/index"
- val UsrPathField = "usr_paths"
-
- private val log = Logger.getLogger("SigarLoader")
-
- def sigarProxy = init(new File(System.getProperty(TmpDir)))
-
- private[sigar] def init(baseTmp: File): SigarProxy = {
- val tmpDir = createTmpDir(baseTmp)
- for (lib ← loadIndex) copy(lib, tmpDir)
-
- attachToLibraryPath(tmpDir)
-
- try {
- val sigar = new Sigar()
- printBanner(sigar)
- sigar
- } catch {
- case t: Throwable ⇒ {
- log.severe("Failed to load sigar")
- throw new RuntimeException(t)
- }
- }
- }
-
- private[sigar] val usrPathField = {
- val usrPathField = classOf[ClassLoader].getDeclaredField(UsrPathField)
- usrPathField.setAccessible(true)
- usrPathField
- }
-
- private[sigar] def attachToLibraryPath(dir: File): Unit = {
- val dirAbsolute = dir.getAbsolutePath
- System.setProperty(JavaLibraryPath, newLibraryPath(dirAbsolute))
- var paths = usrPathField.get(null).asInstanceOf[Array[String]]
- if (paths == null) paths = new Array[String](0)
- for (path ← paths) if (path == dirAbsolute) return
- val newPaths = util.Arrays.copyOf(paths, paths.length + 1)
- newPaths(newPaths.length - 1) = dirAbsolute
- usrPathField.set(null, newPaths)
- }
-
- private[sigar] def newLibraryPath(dirAbsolutePath: String): String = {
- Option(System.getProperty(JavaLibraryPath)).fold(dirAbsolutePath)(oldValue ⇒ s"$dirAbsolutePath${File.pathSeparator}$oldValue")
- }
-
- private[sigar] def copy(lib: String, tmpDir: File) {
- val target = new File(tmpDir, lib)
- if (target.exists()) return
- write(classOf[Loader].getResourceAsStream(lib), target)
- }
-
- private[sigar] def createTmpDir(baseTmp: File): File = {
- val tmpDir = new File(baseTmp, s"sigar-$Version")
- if (!tmpDir.exists()) {
- if (!tmpDir.mkdirs()) throw new RuntimeException(s"Could not create temp sigar directory: ${tmpDir.getAbsolutePath}")
- }
- if (!tmpDir.isDirectory) throw new RuntimeException(s"sigar temp directory path is not a directory: ${tmpDir.getAbsolutePath}")
- if (!tmpDir.canWrite()) throw new RuntimeException(s"sigar temp directory not writeable: ${tmpDir.getAbsolutePath}")
- tmpDir
- }
-
- private[sigar] def loadIndex(): List[String] = {
- val libs = new ArrayList[String]()
- val is = classOf[Loader].getResourceAsStream(IndexFile)
-
- for (line ← Source.fromInputStream(is).getLines()) {
- val currentLine = line.trim()
- libs add currentLine
- }
- libs
- }
-
- private[sigar] def write(input: InputStream, to: File) {
- val out = new FileOutputStream(to)
- try {
- transfer(input, out)
- } finally {
- out.close()
- }
- }
-
- private[sigar] def transfer(input: InputStream, out: OutputStream) {
- val buffer = new Array[Byte](8192)
-
- @tailrec def transfer() {
- val read = input.read(buffer)
- if (read >= 0) {
- out.write(buffer, 0, read)
- transfer()
- }
- }
- transfer()
- }
-
- private[sigar] def printBanner(sigar: Sigar) = {
- val os = OperatingSystem.getInstance
-
- def loadAverage(sigar: Sigar) = {
- try {
- val average = sigar.getLoadAverage
- (average(0), average(1), average(2))
- } catch {
- case s: org.hyperic.sigar.SigarNotImplementedException ⇒ {
- (0d, 0d, 0d)
- }
- }
- }
-
- def uptime(sigar: Sigar) = {
- def formatUptime(uptime: Double): String = {
- var retval: String = ""
- val days: Int = uptime.toInt / (60 * 60 * 24)
- var minutes: Int = 0
- var hours: Int = 0
-
- if (days != 0) {
- retval += s"$days ${(if ((days > 1)) "days" else "day")}, "
- }
-
- minutes = uptime.toInt / 60
- hours = minutes / 60
- hours %= 24
- minutes %= 60
-
- if (hours != 0) {
- retval += hours + ":" + minutes
- } else {
- retval += minutes + " min"
- }
- retval
- }
-
- val uptime = sigar.getUptime
- val now = System.currentTimeMillis()
-
- s"up ${formatUptime(uptime.getUptime())}"
- }
-
- val message =
- """
- |
- | _____ _ __ __ _ _ _ _ _
- | / ____| | | | \/ | | | (_) | | | | | |
- || (___ _ _ ___| |_ ___ _ __ ___ | \ / | ___| |_ _ __ _ ___ ___| | ___ __ _ __| | ___ __| |
- | \___ \| | | / __| __/ _ \ '_ ` _ \| |\/| |/ _ \ __| '__| |/ __/ __| | / _ \ / _` |/ _` |/ _ \/ _` |
- | ____) | |_| \__ \ || __/ | | | | | | | | __/ |_| | | | (__\__ \ |___| (_) | (_| | (_| | __/ (_| |
- ||_____/ \__, |___/\__\___|_| |_| |_|_| |_|\___|\__|_| |_|\___|___/______\___/ \__,_|\__,_|\___|\__,_|
- | __/ |
- | |___/
- |
- | [System Status] [OS Information]
- | |--------------------------------| |----------------------------------------|
- | Up Time: %-10s Description: %s
- | Load Average: %-16s Name: %s
- | Version: %s
- | Arch: %s
- |
- """.stripMargin.format(uptime(sigar), os.getDescription, loadAverage(sigar), os.getName, os.getVersion, os.getArch)
- log.info(message)
- }
- class Loader private[sigar]
-}
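This hand-rolled native-library loader disappears in favor of the io.kamon sigar-loader artifact added in project/Dependencies.scala below. A minimal sketch of provisioning Sigar through it, assuming sigar-loader's SigarProvisioner API:

    import kamon.sigar.SigarProvisioner
    import org.hyperic.sigar.Sigar

    // Extracts and loads the native Sigar library for the current platform;
    // afterwards Sigar can be instantiated normally. (Assumed API from io.kamon:sigar-loader.)
    SigarProvisioner.provision()
    val sigar = new Sigar
    println(s"pid=${sigar.getPid}")
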
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/SigarMetricsUpdater.scala b/kamon-system-metrics/src/main/scala/kamon/system/sigar/SigarMetricsUpdater.scala
new file mode 100644
index 00000000..e68b0ede
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/SigarMetricsUpdater.scala
@@ -0,0 +1,75 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.system.sigar
+
+import akka.actor.{ Props, Actor }
+import kamon.Kamon
+import kamon.metric.instrument.InstrumentFactory
+import kamon.metric.{ Entity, EntityRecorder, MetricsExtension }
+import kamon.system.sigar.SigarMetricsUpdater.UpdateSigarMetrics
+import org.hyperic.sigar.Sigar
+
+import scala.concurrent.duration.FiniteDuration
+
+class SigarMetricsUpdater(refreshInterval: FiniteDuration) extends Actor {
+ val sigar = new Sigar
+ val metricsExtension = Kamon.metrics
+
+ val sigarMetrics = List(
+ CpuMetrics.register(sigar, metricsExtension),
+ FileSystemMetrics.register(sigar, metricsExtension),
+ LoadAverageMetrics.register(sigar, metricsExtension),
+ MemoryMetrics.register(sigar, metricsExtension),
+ NetworkMetrics.register(sigar, metricsExtension),
+ ProcessCpuMetrics.register(sigar, metricsExtension))
+
+ val refreshSchedule = context.system.scheduler.schedule(refreshInterval, refreshInterval, self, UpdateSigarMetrics)(context.dispatcher)
+
+ def receive = {
+ case UpdateSigarMetrics ⇒ updateMetrics()
+ }
+
+ def updateMetrics(): Unit = {
+ sigarMetrics.foreach(_.update())
+ }
+
+ override def postStop(): Unit = {
+ refreshSchedule.cancel()
+ super.postStop()
+ }
+}
+
+object SigarMetricsUpdater {
+ def props(refreshInterval: FiniteDuration): Props =
+ Props(new SigarMetricsUpdater(refreshInterval))
+
+ case object UpdateSigarMetrics
+}
+
+trait SigarMetric extends EntityRecorder {
+ def update(): Unit
+}
+
+abstract class SigarMetricRecorderCompanion(metricName: String) {
+ def register(sigar: Sigar, metricsExtension: MetricsExtension): SigarMetric = {
+ val instrumentFactory = metricsExtension.instrumentFactory("system-metric")
+ metricsExtension.register(Entity(metricName, "system-metric"), apply(sigar, instrumentFactory)).recorder
+ }
+
+ def apply(sigar: Sigar, instrumentFactory: InstrumentFactory): SigarMetric
+}
+
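The companion pattern above makes adding a new Sigar-backed recorder mechanical: extend SigarMetricRecorderCompanion with an entity name and implement apply. A hypothetical uptime recorder as an illustration (the entity and histogram names are made up, not part of this patch):

    class UptimeMetrics(sigar: Sigar, instrumentFactory: InstrumentFactory)
        extends GenericEntityRecorder(instrumentFactory) with SigarMetric {
      val uptime = histogram("uptime-seconds")

      def update(): Unit =
        uptime.record(sigar.getUptime.getUptime.toLong)
    }

    object UptimeMetrics extends SigarMetricRecorderCompanion("uptime") {
      def apply(sigar: Sigar, instrumentFactory: InstrumentFactory): UptimeMetrics =
        new UptimeMetrics(sigar, instrumentFactory)
    }

Registering it is then a one-line addition to the sigarMetrics list in SigarMetricsUpdater.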
diff --git a/kamon-system-metrics/src/test/scala/kamon/metrics/RedirectLogging.scala b/kamon-system-metrics/src/test/scala/kamon/metrics/RedirectLogging.scala
new file mode 100644
index 00000000..fbf42cf0
--- /dev/null
+++ b/kamon-system-metrics/src/test/scala/kamon/metrics/RedirectLogging.scala
@@ -0,0 +1,34 @@
+/* =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+import java.util.logging.LogManager
+import org.slf4j.bridge.SLF4JBridgeHandler
+
+/**
+ * Redirect different logging sources to SLF4J.
+ */
+trait RedirectLogging {
+
+ def redirectLogging(): Unit = {
+ // Redirect JUL to SLF4J.
+ LogManager.getLogManager().reset()
+ SLF4JBridgeHandler.install()
+ }
+
+ redirectLogging()
+
+}
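Mixing the trait into a spec is enough, since redirectLogging() runs at construction time; the jul-to-slf4j bridge it installs is one of the new test dependencies added in project/Dependencies.scala. A usage sketch, with an illustrative spec that is not part of this patch:

    // Hypothetical spec: Sigar's java.util.logging output ends up on the SLF4J backend.
    class SigarLoggingSpec extends BaseKamonSpec("sigar-logging-spec") with RedirectLogging {
      "Sigar's java.util.logging output" should {
        "be routed through SLF4J" in {
          java.util.logging.Logger.getLogger("SigarLoader").info("routed through the bridge")
        }
      }
    }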
diff --git a/kamon-system-metrics/src/test/scala/kamon/metrics/SystemMetricsSpec.scala b/kamon-system-metrics/src/test/scala/kamon/metrics/SystemMetricsSpec.scala
index 4f7867ed..4d633952 100644
--- a/kamon-system-metrics/src/test/scala/kamon/metrics/SystemMetricsSpec.scala
+++ b/kamon-system-metrics/src/test/scala/kamon/metrics/SystemMetricsSpec.scala
@@ -15,362 +15,140 @@
package kamon.metric
-import akka.actor.ActorSystem
-import akka.testkit.{ TestKitBase, TestProbe }
+import java.lang.management.ManagementFactory
+
import com.typesafe.config.ConfigFactory
-import kamon.Kamon
-import kamon.metric.Subscriptions.TickMetricSnapshot
-import kamon.metrics.CPUMetrics.CPUMetricSnapshot
-import kamon.metrics.ContextSwitchesMetrics.ContextSwitchesMetricsSnapshot
-import kamon.metrics.GCMetrics.GCMetricSnapshot
-import kamon.metrics.HeapMetrics.HeapMetricSnapshot
-import kamon.metrics.MemoryMetrics.MemoryMetricSnapshot
-import kamon.metrics.NetworkMetrics.NetworkMetricSnapshot
-import kamon.metrics.ProcessCPUMetrics.ProcessCPUMetricsSnapshot
-import kamon.metrics._
-import kamon.system.SystemMetricsExtension
-import org.scalatest.{ Matchers, WordSpecLike }
-
-import scala.concurrent.duration._
-
-class SystemMetricsSpec extends TestKitBase with WordSpecLike with Matchers {
- implicit lazy val system: ActorSystem = ActorSystem("system-metrics-spec", ConfigFactory.parseString(
- """
- |akka {
- | extensions = ["kamon.system.SystemMetrics"]
- |}
- |
- |kamon.metrics {
- |
- | disable-aspectj-weaver-missing-error = true
- |
- | tick-interval = 1 second
- |
- | system {
- | cpu {
- | user {
- | highest-trackable-value = 999999999
- | significant-value-digits = 2
- | }
- | system {
- | highest-trackable-value = 999999999
- | significant-value-digits = 2
- | }
- | wait {
- | highest-trackable-value = 999999999
- | significant-value-digits = 2
- | }
- | idle {
- | highest-trackable-value = 999999999
- | significant-value-digits = 2
- | }
- | }
- | process-cpu {
- | user {
- | highest-trackable-value = 999999999
- | significant-value-digits = 2
- | }
- | system {
- | highest-trackable-value = 999999999
- | significant-value-digits = 2
- | }
- | }
- | memory {
- | used {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- | free {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- | buffer {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- | cache {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- | swap-used {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- | swap-free {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- | }
- | context-switches {
- | per-process-voluntary {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- | per-process-non-voluntary {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- | global {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- | }
- | network {
- | rx-bytes {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- | tx-bytes {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- | rx-errors {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- | tx-errors {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- | }
- | heap {
- | used {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- | max {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- | committed {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- | }
- | gc {
- | count {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- | time {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- | }
- | }
- |}
- """.stripMargin))
-
- "the Kamon CPU Metrics" should {
- "record user, system, wait, idle metrics" in new CPUMetricsListenerFixture {
- val metricsListener = subscribeToMetrics()
-
- val CPUMetrics = expectCPUMetrics(metricsListener, 3 seconds)
- CPUMetrics.user.max should be >= 0L
- CPUMetrics.system.max should be >= 0L
- CPUMetrics.cpuWait.max should be >= 0L
- CPUMetrics.idle.max should be >= 0L
- }
- }
- "the Kamon GC Metrics" should {
- "record count, time metrics" in new GCMetricsListenerFixture {
- val metricsListener = subscribeToMetrics()
+import kamon.system.jmx.GarbageCollectionMetrics
+import kamon.testkit.BaseKamonSpec
+import scala.collection.JavaConverters._
- val GCMetrics = expectGCMetrics(metricsListener, 3 seconds)
- GCMetrics.count.max should be > 0L
- GCMetrics.time.max should be > 0L
- }
- }
+class SystemMetricsSpec extends BaseKamonSpec("system-metrics-spec") with RedirectLogging {
- "the Kamon Heap Metrics" should {
- "record used, max, commited metrics" in new HeapMetricsListenerFixture {
- val metricsListener = subscribeToMetrics()
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon.metric {
+ | tick-interval = 1 hour
+ |}
+ |
+ |akka {
+ | extensions = ["kamon.system.SystemMetrics"]
+ |}
+ """.stripMargin)
- val HeapMetrics = expectHeapMetrics(metricsListener, 3 seconds)
- HeapMetrics.used.max should be >= 0L
- HeapMetrics.max.max should be >= 0L
- HeapMetrics.committed.max should be >= 0L
- }
- }
+ override protected def beforeAll(): Unit =
+ Thread.sleep(2000) // Give the recorders some time to store a few values.
- "the Kamon Memory Metrics" should {
- "record used, free, buffer, cache, swap used, swap free metrics" in new MemoryMetricsListenerFixture {
- val metricsListener = subscribeToMetrics()
-
- val MemoryMetrics = expectMemoryMetrics(metricsListener, 3 seconds)
- MemoryMetrics.used.max should be >= 0L
- MemoryMetrics.free.max should be >= 0L
- MemoryMetrics.buffer.max should be >= 0L
- MemoryMetrics.cache.max should be >= 0L
- MemoryMetrics.swapUsed.max should be >= 0L
- MemoryMetrics.swapFree.max should be >= 0L
+ "the Kamon System Metrics module" should {
+ "record user, system, wait, idle and stolen CPU metrics" in {
+ val cpuMetrics = takeSnapshotOf("cpu", "system-metric")
+
+ cpuMetrics.histogram("cpu-user").get.numberOfMeasurements should be > 0L
+ cpuMetrics.histogram("cpu-system").get.numberOfMeasurements should be > 0L
+ cpuMetrics.histogram("cpu-wait").get.numberOfMeasurements should be > 0L
+ cpuMetrics.histogram("cpu-idle").get.numberOfMeasurements should be > 0L
+ cpuMetrics.histogram("cpu-stolen").get.numberOfMeasurements should be > 0L
}
- }
- "the Kamon Network Metrics" should {
- "record rxBytes, txBytes, rxErrors, txErrors metrics" in new NetworkMetricsListenerFixture {
- val metricsListener = subscribeToMetrics()
+ "record count and time garbage collection metrics" in {
+ val availableGarbageCollectors = ManagementFactory.getGarbageCollectorMXBeans.asScala.filter(_.isValid)
- val NetworkMetrics = expectNetworkMetrics(metricsListener, 3 seconds)
- NetworkMetrics.rxBytes.max should be >= 0L
- NetworkMetrics.txBytes.max should be >= 0L
- NetworkMetrics.rxErrors.max should be >= 0L
- NetworkMetrics.txErrors.max should be >= 0L
+ for (collectorName ← availableGarbageCollectors) {
+ val sanitizedName = GarbageCollectionMetrics.sanitizeCollectorName(collectorName.getName)
+ val collectorMetrics = takeSnapshotOf(s"$sanitizedName-garbage-collector", "system-metric")
+
+ collectorMetrics.gauge("garbage-collection-count").get.numberOfMeasurements should be > 0L
+ collectorMetrics.gauge("garbage-collection-time").get.numberOfMeasurements should be > 0L
+ }
}
- }
- "the Kamon Process CPU Metrics" should {
- "record Cpu Percent, Total Process Time metrics" in new ProcessCPUMetricsListenerFixture {
- val metricsListener = subscribeToMetrics()
+ "record used, max and committed heap metrics" in {
+ val heapMetrics = takeSnapshotOf("heap-memory", "system-metric")
- val ProcessCPUMetrics = expectProcessCPUMetrics(metricsListener, 3 seconds)
- ProcessCPUMetrics.cpuPercent.max should be > 0L
- ProcessCPUMetrics.totalProcessTime.max should be > 0L
+ heapMetrics.gauge("heap-used").get.numberOfMeasurements should be > 0L
+ heapMetrics.gauge("heap-max").get.numberOfMeasurements should be > 0L
+ heapMetrics.gauge("heap-committed").get.numberOfMeasurements should be > 0L
}
- }
- "the Kamon ContextSwitches Metrics" should {
- "record Context Switches Global, Voluntary and Non Voluntary metrics" in new ContextSwitchesMetricsListenerFixture {
- val metricsListener = subscribeToMetrics()
+ "record used, max and committed non-heap metrics" in {
+ val nonHeapMetrics = takeSnapshotOf("non-heap-memory", "system-metric")
- val ContextSwitchesMetrics = expectContextSwitchesMetrics(metricsListener, 3 seconds)
- ContextSwitchesMetrics.perProcessVoluntary.max should be >= 0L
- ContextSwitchesMetrics.perProcessNonVoluntary.max should be >= 0L
- ContextSwitchesMetrics.global.max should be >= 0L
+ nonHeapMetrics.gauge("non-heap-used").get.numberOfMeasurements should be > 0L
+ nonHeapMetrics.gauge("non-heap-max").get.numberOfMeasurements should be > 0L
+ nonHeapMetrics.gauge("non-heap-committed").get.numberOfMeasurements should be > 0L
}
- }
- def expectCPUMetrics(listener: TestProbe, waitTime: FiniteDuration): CPUMetricSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
- }
- val cpuMetricsOption = tickSnapshot.metrics.get(CPUMetrics(SystemMetricsExtension.CPU))
- cpuMetricsOption should not be empty
- cpuMetricsOption.get.asInstanceOf[CPUMetricSnapshot]
- }
+ "record daemon, count and peak jvm threads metrics" in {
+ val threadsMetrics = takeSnapshotOf("threads", "system-metric")
- trait CPUMetricsListenerFixture {
- def subscribeToMetrics(): TestProbe = {
- val metricsListener = TestProbe()
- Kamon(Metrics).subscribe(CPUMetrics, "*", metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
- metricsListener
+ threadsMetrics.gauge("daemon-thread-count").get.numberOfMeasurements should be > 0L
+ threadsMetrics.gauge("peak-thread-count").get.numberOfMeasurements should be > 0L
+ threadsMetrics.gauge("thread-count").get.numberOfMeasurements should be > 0L
}
- }
- def expectGCMetrics(listener: TestProbe, waitTime: FiniteDuration): GCMetricSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
+ "record loaded, unloaded and current class loading metrics" in {
+ val classLoadingMetrics = takeSnapshotOf("class-loading", "system-metric")
+
+ classLoadingMetrics.gauge("classes-loaded").get.numberOfMeasurements should be > 0L
+ classLoadingMetrics.gauge("classes-unloaded").get.numberOfMeasurements should be > 0L
+ classLoadingMetrics.gauge("classes-currently-loaded").get.numberOfMeasurements should be > 0L
}
- val gcMetricsOption = tickSnapshot.metrics.get(GCMetrics(SystemMetricsExtension.garbageCollectors(0).getName))
- gcMetricsOption should not be empty
- gcMetricsOption.get.asInstanceOf[GCMetricSnapshot]
- }
+ "record reads, writes, queue time and service time file system metrics" in {
+ val fileSystemMetrics = takeSnapshotOf("file-system", "system-metric")
- trait GCMetricsListenerFixture {
- def subscribeToMetrics(): TestProbe = {
- val metricsListener = TestProbe()
- Kamon(Metrics).subscribe(GCMetrics, "*", metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
- metricsListener
+ fileSystemMetrics.histogram("file-system-reads").get.numberOfMeasurements should be > 0L
+ fileSystemMetrics.histogram("file-system-writes").get.numberOfMeasurements should be > 0L
}
- }
- def expectHeapMetrics(listener: TestProbe, waitTime: FiniteDuration): HeapMetricSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
- }
- val heapMetricsOption = tickSnapshot.metrics.get(HeapMetrics(SystemMetricsExtension.Heap))
- heapMetricsOption should not be empty
- heapMetricsOption.get.asInstanceOf[HeapMetricSnapshot]
- }
+ "record 1 minute, 5 minutes and 15 minutes metrics load average metrics" in {
+ val loadAverage = takeSnapshotOf("load-average", "system-metric")
- trait HeapMetricsListenerFixture {
- def subscribeToMetrics(): TestProbe = {
- val metricsListener = TestProbe()
- Kamon(Metrics).subscribe(HeapMetrics, "*", metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
- metricsListener
+ loadAverage.histogram("one-minute").get.numberOfMeasurements should be > 0L
+ loadAverage.histogram("five-minutes").get.numberOfMeasurements should be > 0L
+ loadAverage.histogram("fifteen-minutes").get.numberOfMeasurements should be > 0L
}
- }
- def expectMemoryMetrics(listener: TestProbe, waitTime: FiniteDuration): MemoryMetricSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
- }
- val memoryMetricsOption = tickSnapshot.metrics.get(MemoryMetrics(SystemMetricsExtension.Memory))
- memoryMetricsOption should not be empty
- memoryMetricsOption.get.asInstanceOf[MemoryMetricSnapshot]
- }
+ "record used, free, swap used, swap free system memory metrics" in {
+ val memoryMetrics = takeSnapshotOf("memory", "system-metric")
- trait MemoryMetricsListenerFixture {
- def subscribeToMetrics(): TestProbe = {
- val metricsListener = TestProbe()
- Kamon(Metrics).subscribe(MemoryMetrics, "*", metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
- metricsListener
+ memoryMetrics.histogram("memory-used").get.numberOfMeasurements should be > 0L
+ memoryMetrics.histogram("memory-free").get.numberOfMeasurements should be > 0L
+ memoryMetrics.histogram("swap-used").get.numberOfMeasurements should be > 0L
+ memoryMetrics.histogram("swap-free").get.numberOfMeasurements should be > 0L
}
- }
- def expectNetworkMetrics(listener: TestProbe, waitTime: FiniteDuration): NetworkMetricSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
- }
- val networkMetricsOption = tickSnapshot.metrics.get(NetworkMetrics(SystemMetricsExtension.Network))
- networkMetricsOption should not be empty
- networkMetricsOption.get.asInstanceOf[NetworkMetricSnapshot]
- }
+ "record rxBytes, txBytes, rxErrors, txErrors, rxDropped, txDropped network metrics" in {
+ val networkMetrics = takeSnapshotOf("network", "system-metric")
- trait NetworkMetricsListenerFixture {
- def subscribeToMetrics(): TestProbe = {
- val metricsListener = TestProbe()
- Kamon(Metrics).subscribe(NetworkMetrics, "*", metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
- metricsListener
+ networkMetrics.histogram("tx-bytes").get.numberOfMeasurements should be > 0L
+ networkMetrics.histogram("rx-bytes").get.numberOfMeasurements should be > 0L
+ networkMetrics.histogram("tx-errors").get.numberOfMeasurements should be > 0L
+ networkMetrics.histogram("rx-errors").get.numberOfMeasurements should be > 0L
+ networkMetrics.histogram("tx-dropped").get.numberOfMeasurements should be > 0L
+ networkMetrics.histogram("rx-dropped").get.numberOfMeasurements should be > 0L
}
- }
- def expectProcessCPUMetrics(listener: TestProbe, waitTime: FiniteDuration): ProcessCPUMetricsSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
- }
- val processCPUMetricsOption = tickSnapshot.metrics.get(ProcessCPUMetrics(SystemMetricsExtension.ProcessCPU))
- processCPUMetricsOption should not be empty
- processCPUMetricsOption.get.asInstanceOf[ProcessCPUMetricsSnapshot]
- }
+ "record system and user CPU percentage for the application process" in {
+ val processCpuMetrics = takeSnapshotOf("process-cpu", "system-metric")
- trait ProcessCPUMetricsListenerFixture {
- def subscribeToMetrics(): TestProbe = {
- val metricsListener = TestProbe()
- Kamon(Metrics).subscribe(ProcessCPUMetrics, "*", metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
- metricsListener
+ processCpuMetrics.histogram("process-user-cpu").get.numberOfMeasurements should be > 0L
+ processCpuMetrics.histogram("process-system-cpu").get.numberOfMeasurements should be > 0L
+ processCpuMetrics.histogram("process-cpu").get.numberOfMeasurements should be > 0L
}
- }
- def expectContextSwitchesMetrics(listener: TestProbe, waitTime: FiniteDuration): ContextSwitchesMetricsSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
- }
- val contextSwitchesMetricsOption = tickSnapshot.metrics.get(ContextSwitchesMetrics(SystemMetricsExtension.ContextSwitches))
- contextSwitchesMetricsOption should not be empty
- contextSwitchesMetricsOption.get.asInstanceOf[ContextSwitchesMetricsSnapshot]
- }
+ "record Context Switches Global, Voluntary and Non Voluntary metrics when running on Linux" in {
+ if (isLinux) {
+ val contextSwitchesMetrics = takeSnapshotOf("context-switches", "system-metric")
- trait ContextSwitchesMetricsListenerFixture {
- def subscribeToMetrics(): TestProbe = {
- val metricsListener = TestProbe()
- Kamon(Metrics).subscribe(ContextSwitchesMetrics, "*", metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
- metricsListener
+ contextSwitchesMetrics.histogram("context-switches-process-voluntary").get.numberOfMeasurements should be > 0L
+ contextSwitchesMetrics.histogram("context-switches-process-non-voluntary").get.numberOfMeasurements should be > 0L
+ contextSwitchesMetrics.histogram("context-switches-global").get.numberOfMeasurements should be > 0L
+ }
}
}
+
+ def isLinux: Boolean =
+ System.getProperty("os.name").indexOf("Linux") != -1
+
}
diff --git a/kamon-core/src/main/scala/kamon/AkkaExtensionSwap.scala b/kamon-testkit/src/main/scala/testkit/AkkaExtensionSwap.scala
index b7050c59..2f77df95 100644
--- a/kamon-core/src/main/scala/kamon/AkkaExtensionSwap.scala
+++ b/kamon-testkit/src/main/scala/testkit/AkkaExtensionSwap.scala
@@ -14,11 +14,12 @@
* =========================================================================================
*/
-package kamon
+package testkit
-import akka.actor.{ Extension, ActorSystem, ExtensionId }
import java.util.concurrent.ConcurrentHashMap
+import akka.actor.{ ActorSystem, Extension, ExtensionId }
+
object AkkaExtensionSwap {
def swap(system: ActorSystem, key: ExtensionId[_], value: Extension): Unit = {
val extensionsField = system.getClass.getDeclaredField("extensions")
diff --git a/kamon-testkit/src/main/scala/testkit/TestProbeInstrumentation.scala b/kamon-testkit/src/main/scala/testkit/TestProbeInstrumentation.scala
index 825cc718..9e736971 100644
--- a/kamon-testkit/src/main/scala/testkit/TestProbeInstrumentation.scala
+++ b/kamon-testkit/src/main/scala/testkit/TestProbeInstrumentation.scala
@@ -17,7 +17,7 @@
package akka.testkit
import org.aspectj.lang.annotation._
-import kamon.trace.{ EmptyTraceContext, TraceContextAware, TraceRecorder }
+import kamon.trace.{ EmptyTraceContext, TraceContextAware, TraceContext }
import org.aspectj.lang.ProceedingJoinPoint
import akka.testkit.TestActor.RealMessage
@@ -46,7 +46,7 @@ class TestProbeInstrumentation {
case _ ⇒ EmptyTraceContext
}
- TraceRecorder.withTraceContext(traceContext) {
+ TraceContext.withContext(traceContext) {
pjp.proceed
}
}
diff --git a/project/AspectJ.scala b/project/AspectJ.scala
index 74513bd6..a4a8c2c1 100644
--- a/project/AspectJ.scala
+++ b/project/AspectJ.scala
@@ -1,3 +1,18 @@
+/* =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
import sbt._
import sbt.Keys._
import com.typesafe.sbt.SbtAspectj.{ Aspectj, defaultAspectjSettings }
diff --git a/project/Dependencies.scala b/project/Dependencies.scala
index 2f80f1db..1c748e86 100644
--- a/project/Dependencies.scala
+++ b/project/Dependencies.scala
@@ -1,3 +1,18 @@
+/* =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
import sbt._
object Dependencies {
@@ -10,16 +25,17 @@ object Dependencies {
val sprayVersion = "1.2.2"
val akkaVersion = "2.2.4"
val aspectjVersion = "1.8.4"
- val slf4jVersion = "1.7.6"
val playVersion = "2.2.5"
+ val slf4jVersion = "1.7.7"
+ val sigarVersion = "1.6.5.132"
- val sprayJson = "io.spray" %% "spray-json" % "1.3.0"
- val sprayJsonLenses = "net.virtual-void" %% "json-lenses" % "0.5.4"
+ val sprayJson = "io.spray" %% "spray-json" % "1.3.1"
+ val sprayJsonLenses = "net.virtual-void" %% "json-lenses" % "0.6.0"
val scalatest = "org.scalatest" %% "scalatest" % "2.2.1"
val logback = "ch.qos.logback" % "logback-classic" % "1.0.13"
val aspectJ = "org.aspectj" % "aspectjweaver" % aspectjVersion
val newrelic = "com.newrelic.agent.java" % "newrelic-api" % "3.11.0"
- val hdrHistogram = "org.hdrhistogram" % "HdrHistogram" % "1.2.1"
+ val hdrHistogram = "org.hdrhistogram" % "HdrHistogram" % "2.1.3"
val sprayCan = "io.spray" % "spray-can" % sprayVersion
val sprayRouting = "io.spray" % "spray-routing" % sprayVersion
val sprayTestkit = "io.spray" % "spray-testkit" % sprayVersion
@@ -33,9 +49,12 @@ object Dependencies {
val playTest = "org.scalatestplus" %% "play" % "1.3.0"
val slf4Api = "org.slf4j" % "slf4j-api" % slf4jVersion
val slf4nop = "org.slf4j" % "slf4j-nop" % slf4jVersion
+ val slf4Jul = "org.slf4j" % "jul-to-slf4j" % slf4jVersion
+ val slf4Log4j = "org.slf4j" % "log4j-over-slf4j" % slf4jVersion
val scalaCompiler = "org.scala-lang" % "scala-compiler" % Settings.ScalaVersion
- val sigar = "org.fusesource" % "sigar" % "1.6.4"
val scalazConcurrent = "org.scalaz" %% "scalaz-concurrent" % "7.1.0"
+ val sigarLoader = "io.kamon" % "sigar-loader" % "1.6.5-rev001"
+ val h2 = "com.h2database" % "h2" % "1.4.182"
def compile (deps: ModuleID*): Seq[ModuleID] = deps map (_ % "compile")
def provided (deps: ModuleID*): Seq[ModuleID] = deps map (_ % "provided")
diff --git a/project/Projects.scala b/project/Projects.scala
index 69b4b065..392e7c62 100644
--- a/project/Projects.scala
+++ b/project/Projects.scala
@@ -1,3 +1,18 @@
+/* =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
import sbt._
import Keys._
@@ -6,31 +21,60 @@ object Projects extends Build {
import Settings._
import Dependencies._
- lazy val root = Project("root", file("."))
+ lazy val kamon = Project("kamon", file("."))
.aggregate(kamonCore, kamonSpray, kamonNewrelic, kamonPlayground, kamonDashboard, kamonTestkit, kamonPlay, kamonStatsD,
- kamonDatadog, kamonSystemMetrics, kamonLogReporter, kamonAkkaRemote)
+ kamonDatadog, kamonSystemMetrics, kamonLogReporter, kamonAkkaRemote, kamonJdbc)
.settings(basicSettings: _*)
.settings(formatSettings: _*)
.settings(noPublishing: _*)
- lazy val kamonCore = Project("kamon-core", file("kamon-core"))
+ lazy val kamonCore: Project = Project("kamon-core", file("kamon-core"))
.dependsOn(kamonMacros % "compile-internal, test-internal")
.settings(basicSettings: _*)
.settings(formatSettings: _*)
.settings(aspectJSettings: _*)
.settings(
+ javacOptions in Compile ++= Seq("-XDignore.symbol.file"),
mappings in (Compile, packageBin) ++= mappings.in(kamonMacros, Compile, packageBin).value,
mappings in (Compile, packageSrc) ++= mappings.in(kamonMacros, Compile, packageSrc).value,
libraryDependencies ++=
compile(akkaActor, hdrHistogram) ++
provided(aspectJ) ++
- optional(logback, scalazConcurrent) ++
- test(scalatest, akkaTestKit, akkaSlf4j, logback))
+ optional(logback) ++
+ test(scalatest, akkaTestKit, akkaSlf4j, slf4Jul, slf4Log4j, logback))
+ lazy val kamonAkka = Project("kamon-akka", file("kamon-akka"))
+ .dependsOn(kamonCore % "compile->compile;test->test")
+ .dependsOn(kamonMacros % "compile-internal, test-internal")
+ .dependsOn(kamonScala)
+ .settings(basicSettings: _* )
+ .settings(formatSettings: _*)
+ .settings(aspectJSettings: _*)
+ .settings(
+ libraryDependencies ++=
+ compile(akkaActor) ++
+ provided(aspectJ) ++
+ optional(logback) ++
+ test(scalatest, akkaTestKit, akkaSlf4j, slf4Jul, slf4Log4j, logback))
+
+
+ lazy val kamonScala = Project("kamon-scala", file("kamon-scala"))
+ .dependsOn(kamonCore % "compile->compile;test->test")
+ .dependsOn(kamonMacros % "compile-internal, test-internal")
+ .settings(basicSettings: _* )
+ .settings(formatSettings: _*)
+ .settings(aspectJSettings: _*)
+ .settings(
+ libraryDependencies ++=
+ compile() ++
+ provided(aspectJ) ++
+ optional(scalazConcurrent) ++
+ test(scalatest, akkaTestKit, akkaSlf4j, slf4Jul, slf4Log4j, logback))
+
lazy val kamonAkkaRemote = Project("kamon-akka-remote", file("kamon-akka-remote"))
- .dependsOn(kamonCore)
+ .dependsOn(kamonAkka)
.settings(basicSettings: _* )
.settings(formatSettings: _*)
.settings(aspectJSettings: _*)
@@ -38,7 +82,7 @@ object Projects extends Build {
libraryDependencies ++=
compile(akkaRemote, akkaCluster) ++
provided(aspectJ) ++
- test(scalatest, akkaTestKit))
+ test(scalatest, akkaTestKit, akkaSlf4j, slf4Jul, slf4Log4j, logback))
lazy val kamonSpray = Project("kamon-spray", file("kamon-spray"))
@@ -52,8 +96,9 @@ object Projects extends Build {
libraryDependencies ++=
compile(akkaActor, sprayCan, sprayClient, sprayRouting) ++
provided(aspectJ) ++
- test(scalatest, akkaTestKit, sprayTestkit, slf4Api, slf4nop))
- .dependsOn(kamonCore)
+ test(scalatest, akkaTestKit, sprayTestkit, akkaSlf4j, slf4Jul, slf4Log4j, logback))
+ .dependsOn(kamonCore % "compile->compile;test->test")
+ .dependsOn(kamonAkka)
.dependsOn(kamonTestkit % "test")
@@ -66,7 +111,8 @@ object Projects extends Build {
compile(sprayCan, sprayClient, sprayRouting, sprayJson, sprayJsonLenses, newrelic, akkaSlf4j) ++
provided(aspectJ) ++
test(scalatest, akkaTestKit, sprayTestkit, slf4Api, akkaSlf4j))
- .dependsOn(kamonCore)
+ .dependsOn(kamonCore % "compile->compile;test->test")
+ .dependsOn(kamonTestkit % "compile->compile;test->test")
lazy val kamonPlayground = Project("kamon-playground", file("kamon-playground"))
@@ -108,7 +154,9 @@ object Projects extends Build {
compile(play) ++
provided(aspectJ) ++
test(playTest, akkaTestKit, slf4Api))
- .dependsOn(kamonCore)
+ .dependsOn(kamonCore % "compile->compile;test->test")
+ .dependsOn(kamonScala)
+ .dependsOn(kamonAkka)
lazy val kamonStatsD = Project("kamon-statsd", file("kamon-statsd"))
.settings(basicSettings: _*)
@@ -117,7 +165,7 @@ object Projects extends Build {
libraryDependencies ++=
compile(akkaActor) ++
test(scalatest, akkaTestKit, slf4Api, slf4nop))
- .dependsOn(kamonCore)
+ .dependsOn(kamonCore % "compile->compile;test->test")
.dependsOn(kamonSystemMetrics % "provided")
lazy val kamonDatadog = Project("kamon-datadog", file("kamon-datadog"))
@@ -127,7 +175,7 @@ object Projects extends Build {
libraryDependencies ++=
compile(akkaActor) ++
test(scalatest, akkaTestKit, slf4Api, slf4nop))
- .dependsOn(kamonCore)
+ .dependsOn(kamonCore % "compile->compile;test->test")
.dependsOn(kamonSystemMetrics % "provided")
lazy val kamonLogReporter = Project("kamon-log-reporter", file("kamon-log-reporter"))
@@ -138,7 +186,6 @@ object Projects extends Build {
compile(akkaActor) ++
test(scalatest, akkaTestKit, slf4Api, slf4nop))
.dependsOn(kamonCore)
- .dependsOn(kamonSystemMetrics % "provided")
lazy val kamonMacros = Project("kamon-macros", file("kamon-macros"))
.settings(basicSettings: _*)
@@ -152,9 +199,19 @@ object Projects extends Build {
.settings(fork in Test := true)
.settings(
libraryDependencies ++=
- compile(sigar) ++
- test(scalatest, akkaTestKit, slf4Api, slf4nop))
- .dependsOn(kamonCore)
+ compile(sigarLoader) ++
+ test(scalatest, akkaTestKit, slf4Api, slf4Jul, slf4Log4j, logback))
+ .dependsOn(kamonCore % "compile->compile;test->test")
+
+ lazy val kamonJdbc = Project("kamon-jdbc", file("kamon-jdbc"))
+ .settings(basicSettings: _*)
+ .settings(formatSettings: _*)
+ .settings(aspectJSettings: _*)
+ .settings(
+ libraryDependencies ++=
+ test(h2, scalatest, akkaTestKit, slf4Api) ++
+ provided(aspectJ))
+ .dependsOn(kamonCore % "compile->compile;test->test")
val noPublishing = Seq(publish := (), publishLocal := (), publishArtifact := false)
}
diff --git a/project/Publish.scala b/project/Publish.scala
index 7e30e151..ddb64d2d 100644
--- a/project/Publish.scala
+++ b/project/Publish.scala
@@ -1,3 +1,18 @@
+/* =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
import sbt._
import sbt.Keys._
diff --git a/project/Release.scala b/project/Release.scala
index 8b7ecf30..a388a66f 100644
--- a/project/Release.scala
+++ b/project/Release.scala
@@ -1,3 +1,18 @@
+/* =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
import com.typesafe.sbt.pgp._
import sbt._
import sbt.Keys._
diff --git a/project/Settings.scala b/project/Settings.scala
index 306c88a4..700312c1 100644
--- a/project/Settings.scala
+++ b/project/Settings.scala
@@ -1,6 +1,23 @@
+/* =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+import com.typesafe.sbt.SbtAspectj.AspectjKeys._
+import sbt.Tests.{SubProcess, Group}
import sbt._
import Keys._
-import com.typesafe.sbt.SbtScalariform
+import com.typesafe.sbt.{SbtAspectj, SbtScalariform}
import com.typesafe.sbt.SbtScalariform.ScalariformKeys
import Publish.{settings => publishSettings}
import Release.{settings => releaseSettings}
@@ -9,16 +26,23 @@ import net.virtualvoid.sbt.graph.Plugin.graphSettings
object Settings {
- val ScalaVersion = "2.10.4"
+ val JavaVersion = "1.6"
+ val ScalaVersion = "2.10.4"
+
lazy val basicSettings = Seq(
- scalaVersion := ScalaVersion,
- resolvers ++= Dependencies.resolutionRepos,
- fork in run := true,
- javacOptions := Seq(
- "-source", "1.6", "-target", "1.6"
+ scalaVersion := ScalaVersion,
+ resolvers ++= Dependencies.resolutionRepos,
+ fork in run := true,
+ testGrouping in Test := singleTests((definedTests in Test).value, (javaOptions in Test).value),
+ javacOptions in compile := Seq(
+ "-Xlint:-options",
+ "-source", JavaVersion, "-target", JavaVersion
),
- scalacOptions := Seq(
+ javacOptions in doc := Seq(
+ "-source", JavaVersion
+ ),
+ scalacOptions := Seq(
"-encoding",
"utf8",
"-g:vars",
@@ -29,9 +53,19 @@ object Settings {
"-target:jvm-1.6",
"-language:postfixOps",
"-language:implicitConversions",
+ "-Yinline-warnings",
"-Xlog-reflective-calls"
)) ++ publishSettings ++ releaseSettings ++ graphSettings
+
+ def singleTests(tests: Seq[TestDefinition], jvmSettings: Seq[String]): Seq[Group] =
+ tests map { test =>
+ new Group(
+ name = test.name,
+ tests = Seq(test),
+ runPolicy = SubProcess(ForkOptions(runJVMOptions = jvmSettings)))
+ }
+
lazy val formatSettings = SbtScalariform.scalariformSettings ++ Seq(
ScalariformKeys.preferences in Compile := formattingPreferences,
ScalariformKeys.preferences in Test := formattingPreferences
diff --git a/project/VersionWithSHA.scala b/project/VersionWithSHA.scala
index 4479b88f..dac45788 100644
--- a/project/VersionWithSHA.scala
+++ b/project/VersionWithSHA.scala
@@ -1,3 +1,18 @@
+/* =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
import sbt.Process
object VersionWithSHA {
diff --git a/project/build.properties b/project/build.properties
index 638d14ee..304ec924 100644
--- a/project/build.properties
+++ b/project/build.properties
@@ -1 +1 @@
-sbt.version=0.13.1 \ No newline at end of file
+sbt.version=0.13.7 \ No newline at end of file
diff --git a/version.sbt b/version.sbt
index 23b459e3..4eb411e7 100644
--- a/version.sbt
+++ b/version.sbt
@@ -1 +1 @@
-version in ThisBuild := "0.2.6-SNAPSHOT" \ No newline at end of file
+version in ThisBuild := "0.3.6-SNAPSHOT" \ No newline at end of file