path: root/kamon-core/src/main/scala/kamon/metrics
author     Ivan Topolnjak <ivantopo@gmail.com>  2014-07-03 14:36:42 -0300
committer  Ivan Topolnjak <ivantopo@gmail.com>  2014-07-03 14:36:18 -0300
commit     29068fc70a3e5a17a630c2c7fff951572bb5fa21 (patch)
tree       7ec2632f36e9493cb559f510fa3cc3ead7443511 /kamon-core/src/main/scala/kamon/metrics
parent     4d5803e579e223c4f4f5cb37ab79ca069a007949 (diff)
download   Kamon-29068fc70a3e5a17a630c2c7fff951572bb5fa21.tar.gz
           Kamon-29068fc70a3e5a17a630c2c7fff951572bb5fa21.tar.bz2
           Kamon-29068fc70a3e5a17a630c2c7fff951572bb5fa21.zip
! all: refactor the core metric recording instruments and accommodate UserMetrics
This PR includes several changes to kamon-core, most notably:

- Formalize the interfaces for Histograms, Counters and MinMaxCounters, making sure that the interfaces are as clean as possible.
- Move away from the old Vector[Measurement]-based Histogram snapshots to a new approach in which a single long stores both the index into the counts array and the frequency of that bucket: the leftmost 2 bytes of each long hold the counts-array index, the remaining 6 bytes hold the actual count, and everything is put into a simple long array (an illustrative sketch of this packing follows the message). This way only the buckets that actually have values are included in the snapshot, with the smallest possible memory footprint.
- Introduce Gauges.
- Reorganize the instrumentation for Akka and Scala and rewrite most of the tests for these components so they no longer need to go through the subscription protocol.
- Introduce trace tests and fix various existing tests.
- Make the necessary changes in the New Relic, Datadog and StatsD modules so they compile against the new codebase.

Pending:

- Finish upgrading the New Relic module to the current model.
- Introduce proper limit checks for histograms to ensure that the 2/6 byte limits are never exceeded.
- More testing, more testing, more testing.
- Create the KamonStandalone module.
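For illustration only, here is a minimal sketch of the packing scheme described above. It is not code from this commit; the object name, helper names and the 48-bit mask constant are assumptions made for the example.

// Illustrative sketch (not from this commit): pack a bucket index (leftmost 2 bytes)
// and a count (remaining 6 bytes) into a single Long, as described in the message above.
object CompactRecordSketch {
  private val CountMask: Long = 0xFFFFFFFFFFFFL // lower 48 bits (6 bytes) hold the count

  def pack(index: Int, count: Long): Long = {
    require(index >= 0 && index <= 0xFFFF, "index must fit in 2 bytes")
    require(count >= 0 && count <= CountMask, "count must fit in 6 bytes")
    (index.toLong << 48) | count
  }

  def indexOf(record: Long): Int = (record >>> 48).toInt // leftmost 2 bytes
  def countOf(record: Long): Long = record & CountMask   // remaining 6 bytes

  def main(args: Array[String]): Unit = {
    // Only non-empty buckets produce entries in the snapshot array.
    val snapshot: Array[Long] = Array(pack(3, 12L), pack(513, 42L))
    snapshot.foreach(r => println(s"bucket=${indexOf(r)} count=${countOf(r)}"))
  }
}

Because empty buckets produce no entries at all, the snapshot array only grows with the number of distinct recorded values, which is what keeps the memory footprint small.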
Diffstat (limited to 'kamon-core/src/main/scala/kamon/metrics')
-rw-r--r--  kamon-core/src/main/scala/kamon/metrics/ActorMetrics.scala                      |  70
-rw-r--r--  kamon-core/src/main/scala/kamon/metrics/CustomMetric.scala                      |  52
-rw-r--r--  kamon-core/src/main/scala/kamon/metrics/DispatcherMetrics.scala                 |  71
-rw-r--r--  kamon-core/src/main/scala/kamon/metrics/Metrics.scala                           | 121
-rw-r--r--  kamon-core/src/main/scala/kamon/metrics/MetricsExtension.scala                  | 104
-rw-r--r--  kamon-core/src/main/scala/kamon/metrics/Scale.scala                             |  31
-rw-r--r--  kamon-core/src/main/scala/kamon/metrics/Subscriptions.scala                     | 129
-rw-r--r--  kamon-core/src/main/scala/kamon/metrics/TraceMetrics.scala                      |  66
-rw-r--r--  kamon-core/src/main/scala/kamon/metrics/instruments/ContinuousHdrRecorder.scala |  52
-rw-r--r--  kamon-core/src/main/scala/kamon/metrics/instruments/CounterRecorder.scala       |  38
-rw-r--r--  kamon-core/src/main/scala/kamon/metrics/instruments/HdrRecorder.scala           |  78
-rw-r--r--  kamon-core/src/main/scala/kamon/metrics/instruments/MinMaxCounter.scala         |  58
-rw-r--r--  kamon-core/src/main/scala/kamon/metrics/package.scala                           |  39
13 files changed, 0 insertions, 909 deletions
diff --git a/kamon-core/src/main/scala/kamon/metrics/ActorMetrics.scala b/kamon-core/src/main/scala/kamon/metrics/ActorMetrics.scala
deleted file mode 100644
index 9e19dced..00000000
--- a/kamon-core/src/main/scala/kamon/metrics/ActorMetrics.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metrics
-
-import com.typesafe.config.Config
-import kamon.metrics.instruments.CounterRecorder
-import org.HdrHistogram.HdrRecorder
-
-case class ActorMetrics(name: String) extends MetricGroupIdentity {
- val category = ActorMetrics
-}
-
-object ActorMetrics extends MetricGroupCategory {
- val name = "actor"
-
- case object ProcessingTime extends MetricIdentity { val name, tag = "processing-time" }
- case object MailboxSize extends MetricIdentity { val name, tag = "mailbox-size" }
- case object TimeInMailbox extends MetricIdentity { val name, tag = "time-in-mailbox" }
- case object ErrorCounter extends MetricIdentity { val name, tag = "errors" }
-
- case class ActorMetricRecorder(processingTime: MetricRecorder, mailboxSize: MetricRecorder, timeInMailbox: MetricRecorder, errorCounter: MetricRecorder)
- extends MetricGroupRecorder {
-
- def collect: MetricGroupSnapshot = {
- ActorMetricSnapshot(processingTime.collect(), mailboxSize.collect(), timeInMailbox.collect(), errorCounter.collect())
- }
- }
-
- case class ActorMetricSnapshot(processingTime: MetricSnapshotLike, mailboxSize: MetricSnapshotLike, timeInMailbox: MetricSnapshotLike, errorCounter: MetricSnapshotLike)
- extends MetricGroupSnapshot {
-
- val metrics: Map[MetricIdentity, MetricSnapshotLike] = Map(
- (ProcessingTime -> processingTime),
- (MailboxSize -> mailboxSize),
- (TimeInMailbox -> timeInMailbox),
- (ErrorCounter -> errorCounter))
- }
-
- val Factory = new MetricGroupFactory {
- type GroupRecorder = ActorMetricRecorder
-
- def create(config: Config): ActorMetricRecorder = {
- val settings = config.getConfig("precision.actor")
-
- val processingTimeConfig = extractPrecisionConfig(settings.getConfig("processing-time"))
- val mailboxSizeConfig = extractPrecisionConfig(settings.getConfig("mailbox-size"))
- val timeInMailboxConfig = extractPrecisionConfig(settings.getConfig("time-in-mailbox"))
-
- new ActorMetricRecorder(
- HdrRecorder(processingTimeConfig.highestTrackableValue, processingTimeConfig.significantValueDigits, Scale.Nano),
- HdrRecorder(mailboxSizeConfig.highestTrackableValue, mailboxSizeConfig.significantValueDigits, Scale.Unit),
- HdrRecorder(timeInMailboxConfig.highestTrackableValue, timeInMailboxConfig.significantValueDigits, Scale.Nano),
- CounterRecorder())
- }
- }
-}
diff --git a/kamon-core/src/main/scala/kamon/metrics/CustomMetric.scala b/kamon-core/src/main/scala/kamon/metrics/CustomMetric.scala
deleted file mode 100644
index cd0afac1..00000000
--- a/kamon-core/src/main/scala/kamon/metrics/CustomMetric.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metrics
-
-import kamon.metrics.instruments.ContinuousHdrRecorder
-import org.HdrHistogram.HdrRecorder
-import com.typesafe.config.Config
-
-case class CustomMetric(name: String) extends MetricGroupIdentity {
- val category = CustomMetric
-}
-
-object CustomMetric extends MetricGroupCategory {
- val name = "custom-metric"
- val RecordedValues = new MetricIdentity { val name, tag = "recorded-values" }
-
- def histogram(highestTrackableValue: Long, significantValueDigits: Int, scale: Scale, continuous: Boolean = false) =
- new MetricGroupFactory {
-
- type GroupRecorder = CustomMetricRecorder
-
- def create(config: Config): CustomMetricRecorder = {
- val recorder =
- if (continuous) ContinuousHdrRecorder(highestTrackableValue, significantValueDigits, scale)
- else HdrRecorder(highestTrackableValue, significantValueDigits, scale)
-
- new CustomMetricRecorder(RecordedValues, recorder)
- }
- }
-
- class CustomMetricRecorder(identity: MetricIdentity, underlyingRecorder: HdrRecorder)
- extends MetricGroupRecorder {
-
- def record(value: Long): Unit = underlyingRecorder.record(value)
-
- def collect: MetricGroupSnapshot = DefaultMetricGroupSnapshot(Map((identity, underlyingRecorder.collect())))
- }
-}
diff --git a/kamon-core/src/main/scala/kamon/metrics/DispatcherMetrics.scala b/kamon-core/src/main/scala/kamon/metrics/DispatcherMetrics.scala
deleted file mode 100644
index f41e0c3f..00000000
--- a/kamon-core/src/main/scala/kamon/metrics/DispatcherMetrics.scala
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metrics
-
-import com.typesafe.config.Config
-import org.HdrHistogram.HdrRecorder
-
-case class DispatcherMetrics(name: String) extends MetricGroupIdentity {
- val category = DispatcherMetrics
-}
-
-object DispatcherMetrics extends MetricGroupCategory {
- val name = "dispatcher"
-
- case object MaximumPoolSize extends MetricIdentity { val name, tag = "maximum-pool-size" }
- case object RunningThreadCount extends MetricIdentity { val name, tag = "running-thread-count" }
- case object QueueTaskCount extends MetricIdentity { val name, tag = "queued-task-count" }
- case object PoolSize extends MetricIdentity { val name, tag = "pool-size" }
-
- case class DispatcherMetricRecorder(maximumPoolSize: MetricRecorder, runningThreadCount: MetricRecorder, queueTaskCount: MetricRecorder, poolSize: MetricRecorder)
- extends MetricGroupRecorder {
-
- def collect: MetricGroupSnapshot = {
- DispatcherMetricSnapshot(maximumPoolSize.collect(), runningThreadCount.collect(), queueTaskCount.collect(), poolSize.collect())
- }
- }
-
- case class DispatcherMetricSnapshot(maximumPoolSize: MetricSnapshotLike, runningThreadCount: MetricSnapshotLike, queueTaskCount: MetricSnapshotLike, poolSize: MetricSnapshotLike)
- extends MetricGroupSnapshot {
-
- val metrics: Map[MetricIdentity, MetricSnapshotLike] = Map(
- (MaximumPoolSize -> maximumPoolSize),
- (RunningThreadCount -> runningThreadCount),
- (QueueTaskCount -> queueTaskCount),
- (PoolSize -> poolSize))
- }
-
- val Factory = new MetricGroupFactory {
- type GroupRecorder = DispatcherMetricRecorder
-
- def create(config: Config): DispatcherMetricRecorder = {
- val settings = config.getConfig("precision.dispatcher")
-
- val MaximumPoolSizeConfig = extractPrecisionConfig(settings.getConfig("maximum-pool-size"))
- val RunningThreadCountConfig = extractPrecisionConfig(settings.getConfig("running-thread-count"))
- val QueueTaskCountConfig = extractPrecisionConfig(settings.getConfig("queued-task-count"))
- val PoolSizeConfig = extractPrecisionConfig(settings.getConfig("pool-size"))
-
- new DispatcherMetricRecorder(
- HdrRecorder(MaximumPoolSizeConfig.highestTrackableValue, MaximumPoolSizeConfig.significantValueDigits, Scale.Unit),
- HdrRecorder(RunningThreadCountConfig.highestTrackableValue, RunningThreadCountConfig.significantValueDigits, Scale.Unit),
- HdrRecorder(QueueTaskCountConfig.highestTrackableValue, QueueTaskCountConfig.significantValueDigits, Scale.Unit),
- HdrRecorder(PoolSizeConfig.highestTrackableValue, PoolSizeConfig.significantValueDigits, Scale.Unit))
- }
- }
-}
-
diff --git a/kamon-core/src/main/scala/kamon/metrics/Metrics.scala b/kamon-core/src/main/scala/kamon/metrics/Metrics.scala
deleted file mode 100644
index f07bf38e..00000000
--- a/kamon-core/src/main/scala/kamon/metrics/Metrics.scala
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metrics
-
-import annotation.tailrec
-import com.typesafe.config.Config
-import kamon.metrics.MetricSnapshot.Measurement
-import kamon.metrics.InstrumentTypes.InstrumentType
-
-trait MetricGroupCategory {
- def name: String
-}
-
-trait MetricGroupIdentity {
- def name: String
- def category: MetricGroupCategory
-}
-
-trait MetricIdentity {
- def name: String
- def tag: String
-}
-
-trait MetricGroupRecorder {
- def collect: MetricGroupSnapshot
-}
-
-trait MetricGroupSnapshot {
- def metrics: Map[MetricIdentity, MetricSnapshotLike]
-}
-
-case class DefaultMetricGroupSnapshot(metrics: Map[MetricIdentity, MetricSnapshotLike]) extends MetricGroupSnapshot
-
-trait MetricRecorder {
- def record(value: Long)
- def collect(): MetricSnapshotLike
-}
-
-object InstrumentTypes {
- sealed trait InstrumentType
- case object Histogram extends InstrumentType
- case object Gauge extends InstrumentType
- case object Counter extends InstrumentType
-}
-
-trait MetricSnapshotLike {
- def instrumentType: InstrumentType
- def numberOfMeasurements: Long
- def scale: Scale
- def measurements: Vector[Measurement]
-
- def max: Long = measurements.lastOption.map(_.value).getOrElse(0)
- def min: Long = measurements.headOption.map(_.value).getOrElse(0)
-
- def merge(that: MetricSnapshotLike): MetricSnapshotLike = {
- val mergedMeasurements = Vector.newBuilder[Measurement]
-
- @tailrec def go(left: Vector[Measurement], right: Vector[Measurement], totalNrOfMeasurements: Long): Long = {
- if (left.nonEmpty && right.nonEmpty) {
- val leftValue = left.head
- val rightValue = right.head
-
- if (rightValue.value == leftValue.value) {
- val merged = rightValue.merge(leftValue)
- mergedMeasurements += merged
- go(left.tail, right.tail, totalNrOfMeasurements + merged.count)
- } else {
- if (leftValue.value < rightValue.value) {
- mergedMeasurements += leftValue
- go(left.tail, right, totalNrOfMeasurements + leftValue.count)
- } else {
- mergedMeasurements += rightValue
- go(left, right.tail, totalNrOfMeasurements + rightValue.count)
- }
- }
- } else {
- if (left.isEmpty && right.nonEmpty) {
- mergedMeasurements += right.head
- go(left, right.tail, totalNrOfMeasurements + right.head.count)
- } else {
- if (left.nonEmpty && right.isEmpty) {
- mergedMeasurements += left.head
- go(left.tail, right, totalNrOfMeasurements + left.head.count)
- } else totalNrOfMeasurements
- }
- }
- }
-
- val totalNrOfMeasurements = go(measurements, that.measurements, 0)
- MetricSnapshot(instrumentType, totalNrOfMeasurements, scale, mergedMeasurements.result())
- }
-}
-
-case class MetricSnapshot(instrumentType: InstrumentType, numberOfMeasurements: Long, scale: Scale,
- measurements: Vector[MetricSnapshot.Measurement]) extends MetricSnapshotLike
-
-object MetricSnapshot {
- case class Measurement(value: Long, count: Long) {
- def merge(that: Measurement) = Measurement(value, count + that.count)
- }
-}
-
-trait MetricGroupFactory {
- type GroupRecorder <: MetricGroupRecorder
- def create(config: Config): GroupRecorder
-}
-
diff --git a/kamon-core/src/main/scala/kamon/metrics/MetricsExtension.scala b/kamon-core/src/main/scala/kamon/metrics/MetricsExtension.scala
deleted file mode 100644
index c60babb2..00000000
--- a/kamon-core/src/main/scala/kamon/metrics/MetricsExtension.scala
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metrics
-
-import scala.collection.concurrent.TrieMap
-import akka.actor._
-import com.typesafe.config.Config
-import kamon.util.GlobPathFilter
-import kamon.Kamon
-import akka.actor
-import kamon.metrics.Metrics.MetricGroupFilter
-import kamon.metrics.Subscriptions.Subscribe
-import java.util.concurrent.TimeUnit
-
-class MetricsExtension(system: ExtendedActorSystem) extends Kamon.Extension {
- val metricsExtConfig = system.settings.config.getConfig("kamon.metrics")
-
- /** Configured Dispatchers */
- val metricSubscriptionsDispatcher = system.dispatchers.lookup(metricsExtConfig.getString("dispatchers.metric-subscriptions"))
- val gaugeRecordingsDispatcher = system.dispatchers.lookup(metricsExtConfig.getString("dispatchers.gauge-recordings"))
-
- /** Configuration Settings */
- val gaugeRecordingInterval = metricsExtConfig.getDuration("gauge-recording-interval", TimeUnit.MILLISECONDS)
-
- val storage = TrieMap[MetricGroupIdentity, MetricGroupRecorder]()
- val filters = loadFilters(metricsExtConfig)
- lazy val subscriptions = system.actorOf(Props[Subscriptions], "kamon-metrics-subscriptions")
-
- def register(identity: MetricGroupIdentity, factory: MetricGroupFactory): Option[factory.GroupRecorder] = {
- if (shouldTrack(identity))
- Some(storage.getOrElseUpdate(identity, factory.create(metricsExtConfig)).asInstanceOf[factory.GroupRecorder])
- else
- None
- }
-
- def unregister(identity: MetricGroupIdentity): Unit = {
- storage.remove(identity)
- }
-
- def subscribe[C <: MetricGroupCategory](category: C, selection: String, receiver: ActorRef, permanently: Boolean = false): Unit = {
- subscriptions.tell(Subscribe(category, selection, permanently), receiver)
- }
-
- def collect: Map[MetricGroupIdentity, MetricGroupSnapshot] = {
- (for ((identity, recorder) ← storage) yield (identity, recorder.collect)).toMap
- }
-
- def scheduleGaugeRecorder(body: ⇒ Unit): Cancellable = {
- import scala.concurrent.duration._
-
- system.scheduler.schedule(gaugeRecordingInterval milliseconds, gaugeRecordingInterval milliseconds) {
- body
- }(gaugeRecordingsDispatcher)
- }
-
- private def shouldTrack(identity: MetricGroupIdentity): Boolean = {
- filters.get(identity.category.name).map(filter ⇒ filter.accept(identity.name)).getOrElse(false)
- }
-
- def loadFilters(config: Config): Map[String, MetricGroupFilter] = {
- import scala.collection.JavaConverters._
-
- val filters = config.getObjectList("filters").asScala
-
- val allFilters =
- for (
- filter ← filters;
- entry ← filter.entrySet().asScala
- ) yield {
- val key = entry.getKey
- val keyBasedConfig = entry.getValue.atKey(key)
-
- val includes = keyBasedConfig.getStringList(s"$key.includes").asScala.map(inc ⇒ new GlobPathFilter(inc)).toList
- val excludes = keyBasedConfig.getStringList(s"$key.excludes").asScala.map(exc ⇒ new GlobPathFilter(exc)).toList
-
- (key, MetricGroupFilter(includes, excludes))
- }
-
- allFilters.toMap
- }
-}
-
-object Metrics extends ExtensionId[MetricsExtension] with ExtensionIdProvider {
- def lookup(): ExtensionId[_ <: actor.Extension] = Metrics
- def createExtension(system: ExtendedActorSystem): MetricsExtension = new MetricsExtension(system)
-
- case class MetricGroupFilter(includes: List[GlobPathFilter], excludes: List[GlobPathFilter]) {
- def accept(name: String): Boolean = includes.exists(_.accept(name)) && !excludes.exists(_.accept(name))
- }
-}
diff --git a/kamon-core/src/main/scala/kamon/metrics/Scale.scala b/kamon-core/src/main/scala/kamon/metrics/Scale.scala
deleted file mode 100644
index 6899490a..00000000
--- a/kamon-core/src/main/scala/kamon/metrics/Scale.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metrics
-
-class Scale(val numericValue: Double) extends AnyVal
-
-object Scale {
- val Nano = new Scale(1E-9)
- val Micro = new Scale(1E-6)
- val Milli = new Scale(1E-3)
- val Unit = new Scale(1)
- val Kilo = new Scale(1E3)
- val Mega = new Scale(1E6)
- val Giga = new Scale(1E9)
-
- def convert(fromUnit: Scale, toUnit: Scale, value: Long): Double = (value * fromUnit.numericValue) / toUnit.numericValue
-}
diff --git a/kamon-core/src/main/scala/kamon/metrics/Subscriptions.scala b/kamon-core/src/main/scala/kamon/metrics/Subscriptions.scala
deleted file mode 100644
index c9990229..00000000
--- a/kamon-core/src/main/scala/kamon/metrics/Subscriptions.scala
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metrics
-
-import akka.actor.{ Props, ActorRef, Actor }
-import kamon.metrics.Subscriptions.{ MetricGroupFilter, FlushMetrics, TickMetricSnapshot, Subscribe }
-import kamon.util.GlobPathFilter
-import scala.concurrent.duration.{ FiniteDuration, Duration }
-import java.util.concurrent.TimeUnit
-import kamon.Kamon
-import kamon.metrics.TickMetricSnapshotBuffer.{ Combined, FlushBuffer }
-
-class Subscriptions extends Actor {
- import context.system
-
- val config = context.system.settings.config
- val tickInterval = Duration(config.getDuration("kamon.metrics.tick-interval", TimeUnit.NANOSECONDS), TimeUnit.NANOSECONDS)
- val flushMetricsSchedule = context.system.scheduler.schedule(tickInterval, tickInterval, self, FlushMetrics)(context.dispatcher)
-
- var lastTick: Long = System.currentTimeMillis()
- var subscribedPermanently: Map[MetricGroupFilter, List[ActorRef]] = Map.empty
- var subscribedForOneShot: Map[MetricGroupFilter, List[ActorRef]] = Map.empty
-
- def receive = {
- case Subscribe(category, selection, permanent) ⇒ subscribe(category, selection, permanent)
- case FlushMetrics ⇒ flush()
- }
-
- def subscribe(category: MetricGroupCategory, selection: String, permanent: Boolean): Unit = {
- val filter = MetricGroupFilter(category, new GlobPathFilter(selection))
- if (permanent) {
- val receivers = subscribedPermanently.get(filter).getOrElse(Nil)
- subscribedPermanently = subscribedPermanently.updated(filter, sender :: receivers)
-
- } else {
- val receivers = subscribedForOneShot.get(filter).getOrElse(Nil)
- subscribedForOneShot = subscribedForOneShot.updated(filter, sender :: receivers)
- }
-
- }
-
- def flush(): Unit = {
- val currentTick = System.currentTimeMillis()
- val snapshots = Kamon(Metrics).collect
-
- dispatchSelectedMetrics(lastTick, currentTick, subscribedPermanently, snapshots)
- dispatchSelectedMetrics(lastTick, currentTick, subscribedForOneShot, snapshots)
-
- lastTick = currentTick
- subscribedForOneShot = Map.empty
- }
-
- def dispatchSelectedMetrics(lastTick: Long, currentTick: Long, subscriptions: Map[MetricGroupFilter, List[ActorRef]],
- snapshots: Map[MetricGroupIdentity, MetricGroupSnapshot]): Unit = {
-
- for ((filter, receivers) ← subscriptions) yield {
- val selection = snapshots.filter(group ⇒ filter.accept(group._1))
- val tickMetrics = TickMetricSnapshot(lastTick, currentTick, selection)
-
- receivers.foreach(_ ! tickMetrics)
- }
- }
-}
-
-object Subscriptions {
- case object FlushMetrics
- case class Subscribe(category: MetricGroupCategory, selection: String, permanently: Boolean = false)
- case class TickMetricSnapshot(from: Long, to: Long, metrics: Map[MetricGroupIdentity, MetricGroupSnapshot])
-
- case class MetricGroupFilter(category: MetricGroupCategory, globFilter: GlobPathFilter) {
- def accept(identity: MetricGroupIdentity): Boolean = {
- category.equals(identity.category) && globFilter.accept(identity.name)
- }
- }
-}
-
-class TickMetricSnapshotBuffer(flushInterval: FiniteDuration, receiver: ActorRef) extends Actor {
- val flushSchedule = context.system.scheduler.schedule(flushInterval, flushInterval, self, FlushBuffer)(context.dispatcher)
-
- def receive = empty
-
- def empty: Actor.Receive = {
- case tick: TickMetricSnapshot ⇒ context become (buffering(tick))
- case FlushBuffer ⇒ // Nothing to flush.
- }
-
- def buffering(buffered: TickMetricSnapshot): Actor.Receive = {
- case TickMetricSnapshot(_, to, tickMetrics) ⇒
- val combinedMetrics = combineMaps(buffered.metrics, tickMetrics)(mergeMetricGroup)
- val combinedSnapshot = TickMetricSnapshot(buffered.from, to, combinedMetrics)
-
- context become (buffering(combinedSnapshot))
-
- case FlushBuffer ⇒
- receiver ! buffered
- context become (empty)
-
- }
-
- override def postStop(): Unit = {
- flushSchedule.cancel()
- super.postStop()
- }
-
- def mergeMetricGroup(left: MetricGroupSnapshot, right: MetricGroupSnapshot) = Combined(combineMaps(left.metrics, right.metrics)((l, r) ⇒ l.merge(r)))
-}
-
-object TickMetricSnapshotBuffer {
- case object FlushBuffer
-
- case class Combined(metrics: Map[MetricIdentity, MetricSnapshotLike]) extends MetricGroupSnapshot
-
- def props(flushInterval: FiniteDuration, receiver: ActorRef): Props =
- Props[TickMetricSnapshotBuffer](new TickMetricSnapshotBuffer(flushInterval, receiver))
-}
diff --git a/kamon-core/src/main/scala/kamon/metrics/TraceMetrics.scala b/kamon-core/src/main/scala/kamon/metrics/TraceMetrics.scala
deleted file mode 100644
index 5454edf5..00000000
--- a/kamon-core/src/main/scala/kamon/metrics/TraceMetrics.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metrics
-
-import org.HdrHistogram.HdrRecorder
-import scala.collection.concurrent.TrieMap
-import com.typesafe.config.Config
-
-case class TraceMetrics(name: String) extends MetricGroupIdentity {
- val category = TraceMetrics
-}
-
-object TraceMetrics extends MetricGroupCategory {
- val name = "trace"
-
- case object ElapsedTime extends MetricIdentity { val name, tag = "elapsed-time" }
- case class HttpClientRequest(name: String, tag: String) extends MetricIdentity
-
- class TraceMetricRecorder(val elapsedTime: HdrRecorder, private val segmentRecorderFactory: () ⇒ HdrRecorder)
- extends MetricGroupRecorder {
-
- private val segments = TrieMap[MetricIdentity, HdrRecorder]()
-
- def segmentRecorder(segmentIdentity: MetricIdentity): HdrRecorder =
- segments.getOrElseUpdate(segmentIdentity, segmentRecorderFactory.apply())
-
- def collect: MetricGroupSnapshot = TraceMetricSnapshot(elapsedTime.collect(),
- segments.map { case (identity, recorder) ⇒ (identity, recorder.collect()) }.toMap)
- }
-
- case class TraceMetricSnapshot(elapsedTime: MetricSnapshotLike, segments: Map[MetricIdentity, MetricSnapshotLike])
- extends MetricGroupSnapshot {
-
- def metrics: Map[MetricIdentity, MetricSnapshotLike] = segments + (ElapsedTime -> elapsedTime)
- }
-
- val Factory = new MetricGroupFactory {
- type GroupRecorder = TraceMetricRecorder
-
- def create(config: Config): TraceMetricRecorder = {
-
- val settings = config.getConfig("precision.trace")
- val elapsedTimeConfig = extractPrecisionConfig(settings.getConfig("elapsed-time"))
- val segmentConfig = extractPrecisionConfig(settings.getConfig("segment"))
-
- new TraceMetricRecorder(
- HdrRecorder(elapsedTimeConfig.highestTrackableValue, elapsedTimeConfig.significantValueDigits, Scale.Nano),
- () ⇒ HdrRecorder(segmentConfig.highestTrackableValue, segmentConfig.significantValueDigits, Scale.Nano))
- }
- }
-
-}
diff --git a/kamon-core/src/main/scala/kamon/metrics/instruments/ContinuousHdrRecorder.scala b/kamon-core/src/main/scala/kamon/metrics/instruments/ContinuousHdrRecorder.scala
deleted file mode 100644
index 3a39ec69..00000000
--- a/kamon-core/src/main/scala/kamon/metrics/instruments/ContinuousHdrRecorder.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metrics.instruments
-
-import org.HdrHistogram.HdrRecorder
-import kamon.metrics.{ Scale, MetricSnapshotLike }
-
-/**
- * This recorder keeps track of the last value recorded and automatically adds it back after collecting a snapshot. This is
- * useful in cases where the absence of recordings does not necessarily mean the absence of values. For example, if this
- * recorder is used for recording the mailbox size of an actor, and it only gets updated upon message enqueue or dequeue,
- * the absence of recordings during 1 second means that the size hasn't changed (for example, the actor is blocked doing
- * some work) and it should keep its last known value, instead of dropping to zero and then going back to the real value
- * after a new event is processed.
- *
- */
-class ContinuousHdrRecorder(highestTrackableValue: Long, significantValueDigits: Int, scale: Scale)
- extends HdrRecorder(highestTrackableValue, significantValueDigits, scale) {
-
- @volatile private var lastRecordedValue: Long = 0
-
- override def record(value: Long): Unit = {
- lastRecordedValue = value
- super.record(value)
- }
-
- override def collect(): MetricSnapshotLike = {
- val snapshot = super.collect()
- super.record(lastRecordedValue)
-
- snapshot
- }
-}
-
-object ContinuousHdrRecorder {
- def apply(highestTrackableValue: Long, significantValueDigits: Int, scale: Scale) =
- new ContinuousHdrRecorder(highestTrackableValue, significantValueDigits, scale)
-}
\ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/metrics/instruments/CounterRecorder.scala b/kamon-core/src/main/scala/kamon/metrics/instruments/CounterRecorder.scala
deleted file mode 100644
index e5efbc15..00000000
--- a/kamon-core/src/main/scala/kamon/metrics/instruments/CounterRecorder.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-package kamon.metrics.instruments
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-import kamon.metrics._
-import kamon.metrics.MetricSnapshot.Measurement
-
-import jsr166e.LongAdder
-
-class CounterRecorder extends MetricRecorder {
- private val counter = new LongAdder
-
- def record(value: Long): Unit = {
- counter.add(value)
- }
-
- def collect(): MetricSnapshotLike = {
- val sum = counter.sumThenReset()
- MetricSnapshot(InstrumentTypes.Counter, sum, Scale.Unit, Vector(Measurement(1, sum)))
- }
-}
-
-object CounterRecorder {
- def apply(): CounterRecorder = new CounterRecorder()
-}
\ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/metrics/instruments/HdrRecorder.scala b/kamon-core/src/main/scala/kamon/metrics/instruments/HdrRecorder.scala
deleted file mode 100644
index ce4fd76d..00000000
--- a/kamon-core/src/main/scala/kamon/metrics/instruments/HdrRecorder.scala
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package org.HdrHistogram
-
-import java.util.concurrent.atomic.AtomicLongFieldUpdater
-import scala.annotation.tailrec
-import kamon.metrics._
-
-/**
- * This implementation aims to be used for real-time data collection where data snapshots are taken often over time.
- * The snapshotAndReset() operation extracts all the recorded values from the histogram and resets the counts, but still
- * leaves it in a consistent state even in the case of concurrent modification while the snapshot is being taken.
- */
-class HdrRecorder(highestTrackableValue: Long, significantValueDigits: Int, scale: Scale)
- extends AtomicHistogram(1L, highestTrackableValue, significantValueDigits) with MetricRecorder {
-
- import HdrRecorder.totalCountUpdater
-
- def record(value: Long): Unit = recordValue(value)
-
- def collect(): MetricSnapshotLike = {
- val entries = Vector.newBuilder[MetricSnapshot.Measurement]
- val countsLength = counts.length()
-
- @tailrec def iterate(index: Int, previousValue: Long, nrOfRecordings: Long, bucketLimit: Long, increment: Long): Long = {
- if (index < countsLength) {
- val currentValue = previousValue + increment
- val countAtValue = counts.getAndSet(index, 0)
-
- if (countAtValue > 0)
- entries += MetricSnapshot.Measurement(currentValue, countAtValue)
-
- if (currentValue == bucketLimit)
- iterate(index + 1, currentValue, nrOfRecordings + countAtValue, (bucketLimit << 1) + 1, increment << 1)
- else
- iterate(index + 1, currentValue, nrOfRecordings + countAtValue, bucketLimit, increment)
- } else {
- nrOfRecordings
- }
- }
-
- val nrOfRecordings = iterate(0, -1, 0, subBucketMask, 1)
-
- def tryUpdateTotalCount: Boolean = {
- val previousTotalCount = getTotalCount
- val newTotalCount = previousTotalCount - nrOfRecordings
-
- totalCountUpdater.compareAndSet(this, previousTotalCount, newTotalCount)
- }
-
- while (!tryUpdateTotalCount) {}
-
- MetricSnapshot(InstrumentTypes.Histogram, nrOfRecordings, scale, entries.result())
- }
-
-}
-
-object HdrRecorder {
- val totalCountUpdater = AtomicLongFieldUpdater.newUpdater(classOf[AtomicHistogram], "totalCount")
-
- def apply(highestTrackableValue: Long, significantValueDigits: Int, scale: Scale): HdrRecorder =
- new HdrRecorder(highestTrackableValue, significantValueDigits, scale)
-
-}
diff --git a/kamon-core/src/main/scala/kamon/metrics/instruments/MinMaxCounter.scala b/kamon-core/src/main/scala/kamon/metrics/instruments/MinMaxCounter.scala
deleted file mode 100644
index ba2550af..00000000
--- a/kamon-core/src/main/scala/kamon/metrics/instruments/MinMaxCounter.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-package kamon.metrics.instruments
-
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-import java.lang.Math._
-import jsr166e.LongMaxUpdater
-import kamon.util.PaddedAtomicLong
-import kamon.metrics.instruments.MinMaxCounter.CounterMeasurement
-
-class MinMaxCounter {
- private val min = new LongMaxUpdater
- private val max = new LongMaxUpdater
- private val sum = new PaddedAtomicLong
-
- min.update(0L)
- max.update(0L)
-
- def increment(value: Long = 1L): Unit = {
- val currentValue = sum.addAndGet(value)
- max.update(currentValue)
- }
-
- def decrement(value: Long = 1L): Unit = {
- val currentValue = sum.addAndGet(-value)
- min.update(-currentValue)
- }
-
- def collect(): CounterMeasurement = {
- val currentValue = {
- val value = sum.get()
- if (value < 0) 0 else value
- }
- val result = CounterMeasurement(abs(min.maxThenReset()), max.maxThenReset(), currentValue)
- max.update(currentValue)
- min.update(-currentValue)
- result
- }
-}
-
-object MinMaxCounter {
- def apply() = new MinMaxCounter()
-
- case class CounterMeasurement(min: Long, max: Long, current: Long)
-}
diff --git a/kamon-core/src/main/scala/kamon/metrics/package.scala b/kamon-core/src/main/scala/kamon/metrics/package.scala
deleted file mode 100644
index 640157a9..00000000
--- a/kamon-core/src/main/scala/kamon/metrics/package.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon
-
-import scala.annotation.tailrec
-import com.typesafe.config.Config
-
-package object metrics {
-
- case class HdrPrecisionConfig(highestTrackableValue: Long, significantValueDigits: Int)
-
- def extractPrecisionConfig(config: Config): HdrPrecisionConfig =
- HdrPrecisionConfig(config.getLong("highest-trackable-value"), config.getInt("significant-value-digits"))
-
- @tailrec def combineMaps[K, V](left: Map[K, V], right: Map[K, V])(valueMerger: (V, V) ⇒ V): Map[K, V] = {
- if (right.isEmpty)
- left
- else {
- val (key, rightValue) = right.head
- val value = left.get(key).map(valueMerger(_, rightValue)).getOrElse(rightValue)
-
- combineMaps(left.updated(key, value), right.tail)(valueMerger)
- }
- }
-}