From a3d78ef61a277b0b62dc93daf84756dfa7625d3d Mon Sep 17 00:00:00 2001
From: Ivan Topolnjak
Date: Thu, 25 May 2017 16:52:52 +0200
Subject: trying to flatten out the structure and eliminate the notion of entities

---
 kamon-core/src/main/scala/kamon/Kamon.scala         |  22 +-
 .../src/main/scala/kamon/ReporterRegistry.scala     |   6 +-
 .../src/main/scala/kamon/metric/Entity.scala        |  11 -
 .../src/main/scala/kamon/metric/EntityFilter.scala  | 110 ----------
 .../main/scala/kamon/metric/EntityRecorder.scala    |  90 ---------
 .../main/scala/kamon/metric/EntitySnapshot.scala    |  11 -
 .../src/main/scala/kamon/metric/Filter.scala        | 110 ++++++++++
 .../main/scala/kamon/metric/RecorderRegistry.scala  |  61 ------
 .../src/main/scala/kamon/metric/Registry.scala      | 222 +++++++++++++++++++++
 .../src/main/scala/kamon/metric/TickSnapshot.scala  |  10 +-
 .../scala/kamon/metric/instrument/Counter.scala     |   8 +-
 .../main/scala/kamon/metric/instrument/Gauge.scala  |   8 +-
 .../scala/kamon/metric/instrument/Histogram.scala   |  13 +-
 .../metric/instrument/InstrumentFactory.scala       |  78 +++-----
 .../metric/instrument/InstrumentSnapshot.scala      |  11 +-
 .../kamon/metric/instrument/MinMaxCounter.scala     |   5 +-
 kamon-core/src/main/scala/kamon/package.scala       |   4 +
 kamon-core/src/main/scala/kamon/trace/Span.scala    |  31 ++-
 kamon-core/src/main/scala/kamon/trace/Tracer.scala  |  12 +-
 19 files changed, 435 insertions(+), 388 deletions(-)
 delete mode 100644 kamon-core/src/main/scala/kamon/metric/Entity.scala
 delete mode 100644 kamon-core/src/main/scala/kamon/metric/EntityFilter.scala
 delete mode 100644 kamon-core/src/main/scala/kamon/metric/EntityRecorder.scala
 delete mode 100644 kamon-core/src/main/scala/kamon/metric/EntitySnapshot.scala
 create mode 100644 kamon-core/src/main/scala/kamon/metric/Filter.scala
 delete mode 100644 kamon-core/src/main/scala/kamon/metric/RecorderRegistry.scala
 create mode 100644 kamon-core/src/main/scala/kamon/metric/Registry.scala

diff --git a/kamon-core/src/main/scala/kamon/Kamon.scala b/kamon-core/src/main/scala/kamon/Kamon.scala
index b318d59d..60956ee0 100644
--- a/kamon-core/src/main/scala/kamon/Kamon.scala
+++ b/kamon-core/src/main/scala/kamon/Kamon.scala
@@ -3,22 +3,24 @@ package kamon
 import java.util.concurrent.atomic.AtomicReference
 
 import com.typesafe.config.{Config, ConfigFactory}
-import kamon.metric.{RecorderRegistry, RecorderRegistryImpl}
+import kamon.metric.instrument.{DynamicRange, Histogram}
+import kamon.metric.{MetricLookup, Registry}
 import kamon.trace.Tracer
+import kamon.util.MeasurementUnit
 
-object Kamon {
+object Kamon extends MetricLookup {
   private val _initialConfig = ConfigFactory.load()
-  private val _recorderRegistry = new RecorderRegistryImpl(_initialConfig)
-  private val _reporterRegistry = new ReporterRegistryImpl(_recorderRegistry, _initialConfig)
-  private val _tracer = new Tracer(_recorderRegistry, _reporterRegistry)
+  //private val _recorderRegistry = new RecorderRegistryImpl(_initialConfig)
+  private val _reporterRegistry = new ReporterRegistryImpl(???, _initialConfig)
+  private val _tracer = new Tracer(???, _reporterRegistry)
   private val _environment = new AtomicReference[Environment](environmentFromConfig(ConfigFactory.load()))
 
   def tracer: io.opentracing.Tracer =
     _tracer
 
-  def metrics: RecorderRegistry =
-    _recorderRegistry
+//  def metrics: RecorderRegistry =
+//    _recorderRegistry
 
   def reporters: ReporterRegistry =
     _reporterRegistry
 
@@ -27,13 +29,15 @@ object Kamon {
     _environment.get()
 
   def reconfigure(config: Config): Unit = synchronized {
-
_recorderRegistry.reconfigure(config) + // _recorderRegistry.reconfigure(config) _reporterRegistry.reconfigure(config) _environment.set(environmentFromConfig(config)) } - + private val metricRegistry = new Registry(_initialConfig) + override def histogram(name: String, unit: MeasurementUnit, tags: Map[String, String], dynamicRange: Option[DynamicRange]): Histogram = + metricRegistry.histogram(name, unit, tags, dynamicRange) case class Environment(config: Config, application: String, host: String, instance: String) diff --git a/kamon-core/src/main/scala/kamon/ReporterRegistry.scala b/kamon-core/src/main/scala/kamon/ReporterRegistry.scala index 98bde946..11312512 100644 --- a/kamon-core/src/main/scala/kamon/ReporterRegistry.scala +++ b/kamon-core/src/main/scala/kamon/ReporterRegistry.scala @@ -44,7 +44,7 @@ trait SpansReporter { def reportSpan(span: Span.CompletedSpan): Unit } -class ReporterRegistryImpl(metrics: RecorderRegistryImpl, initialConfig: Config) extends ReporterRegistry { +class ReporterRegistryImpl(metrics: RegistrySnapshotGenerator, initialConfig: Config) extends ReporterRegistry { private val registryExecutionContext = Executors.newSingleThreadScheduledExecutor(threadFactory("kamon-reporter-registry")) private val metricsTickerSchedule = new AtomicReference[ScheduledFuture[_]]() private val metricReporters = new ConcurrentLinkedQueue[ReporterEntry]() @@ -141,7 +141,7 @@ class ReporterRegistryImpl(metrics: RecorderRegistryImpl, initialConfig: Config) executionContext: ExecutionContextExecutorService ) - private class MetricTicker(metricsImpl: RecorderRegistryImpl, reporterEntries: java.util.Queue[ReporterEntry]) extends Runnable { + private class MetricTicker(snapshotGenerator: RegistrySnapshotGenerator, reporterEntries: java.util.Queue[ReporterEntry]) extends Runnable { val logger = LoggerFactory.getLogger(classOf[MetricTicker]) var lastTick = Instant.now() @@ -149,7 +149,7 @@ class ReporterRegistryImpl(metrics: RecorderRegistryImpl, initialConfig: Config) val currentTick = Instant.now() val tickSnapshot = TickSnapshot( interval = Interval(lastTick, currentTick), - entities = metricsImpl.snapshot() + metrics = snapshotGenerator.snapshot() ) reporterEntries.forEach { entry => diff --git a/kamon-core/src/main/scala/kamon/metric/Entity.scala b/kamon-core/src/main/scala/kamon/metric/Entity.scala deleted file mode 100644 index a38a7f46..00000000 --- a/kamon-core/src/main/scala/kamon/metric/Entity.scala +++ /dev/null @@ -1,11 +0,0 @@ -package kamon.metric - - - -case class Entity(name: String, category: String, tags: Map[String, String]) { - - override def toString: String = { - val tagString = tags.map { case (k, v) => k + "=>" + v }.mkString(",") - "Entity(name=\"" + name + "\", category=\"" + category + "\", tags=\"" + tagString + "\"" - } -} diff --git a/kamon-core/src/main/scala/kamon/metric/EntityFilter.scala b/kamon-core/src/main/scala/kamon/metric/EntityFilter.scala deleted file mode 100644 index 77fbfc4b..00000000 --- a/kamon-core/src/main/scala/kamon/metric/EntityFilter.scala +++ /dev/null @@ -1,110 +0,0 @@ -package kamon -package metric - -import java.util.regex.Pattern -import com.typesafe.config.Config - -object EntityFilter { - def fromConfig(config: Config): EntityFilter = { - val filtersConfig = config.getConfig("kamon.metric.filters") - val acceptUnmatched = filtersConfig.getBoolean("accept-unmatched-categories") - - val perCategoryFilters = filtersConfig.firstLevelKeys.filter(_ != "accept-unmatched-categories") map { category: String ⇒ - val includes = 
readFilters(filtersConfig, s"$category.includes") - val excludes = readFilters(filtersConfig, s"$category.excludes") - - (category, new IncludeExcludeNameFilter(includes, excludes)) - } toMap - - new EntityFilter(perCategoryFilters, acceptUnmatched) - } - - private def readFilters(filtersConfig: Config, name: String): Seq[NameFilter] = { - import scala.collection.JavaConverters._ - if(filtersConfig.hasPath(name)) - filtersConfig.getStringList(name).asScala.map(readNameFilter) - else - Seq.empty - } - - private def readNameFilter(pattern: String): NameFilter = { - if(pattern.startsWith("regex:")) - new RegexNameFilter(pattern.drop(6)) - else if(pattern.startsWith("glob:")) - new GlobPathFilter(pattern.drop(5)) - else - new GlobPathFilter(pattern) - } -} - -class EntityFilter(perCategoryFilters: Map[String, NameFilter], acceptUnmatched: Boolean) { - def accept(entity: Entity): Boolean = - perCategoryFilters - .get(entity.category) - .map(_.accept(entity.name)) - .getOrElse(acceptUnmatched) -} - -trait NameFilter { - def accept(name: String): Boolean -} - -class IncludeExcludeNameFilter(includes: Seq[NameFilter], excludes: Seq[NameFilter]) extends NameFilter { - override def accept(name: String): Boolean = - includes.exists(_.accept(name)) && !excludes.exists(_.accept(name)) -} - -class RegexNameFilter(path: String) extends NameFilter { - private val pathRegex = path.r - - override def accept(path: String): Boolean = path match { - case pathRegex(_*) ⇒ true - case _ ⇒ false - } -} - -class GlobPathFilter(glob: String) extends NameFilter { - private val globPattern = Pattern.compile("(\\*\\*?)|(\\?)|(\\\\.)|(/+)|([^*?]+)") - private val compiledPattern = getGlobPattern(glob) - - override def accept(name: String): Boolean = - compiledPattern.matcher(name).matches() - - private def getGlobPattern(glob: String) = { - val patternBuilder = new StringBuilder - val matcher = globPattern.matcher(glob) - while (matcher.find()) { - val (grp1, grp2, grp3, grp4) = (matcher.group(1), matcher.group(2), matcher.group(3), matcher.group(4)) - if (grp1 != null) { - // match a * or ** - if (grp1.length == 2) { - // it's a *workers are able to process multiple metrics* - patternBuilder.append(".*") - } - else { - // it's a * - patternBuilder.append("[^/]*") - } - } - else if (grp2 != null) { - // match a '?' 
glob pattern; any non-slash character - patternBuilder.append("[^/]") - } - else if (grp3 != null) { - // backslash-escaped value - patternBuilder.append(Pattern.quote(grp3.substring(1))) - } - else if (grp4 != null) { - // match any number of / chars - patternBuilder.append("/+") - } - else { - // some other string - patternBuilder.append(Pattern.quote(matcher.group)) - } - } - - Pattern.compile(patternBuilder.toString) - } -} - diff --git a/kamon-core/src/main/scala/kamon/metric/EntityRecorder.scala b/kamon-core/src/main/scala/kamon/metric/EntityRecorder.scala deleted file mode 100644 index ccdb463e..00000000 --- a/kamon-core/src/main/scala/kamon/metric/EntityRecorder.scala +++ /dev/null @@ -1,90 +0,0 @@ -package kamon.metric - -import java.time.Duration -import java.util.concurrent.{ScheduledExecutorService, ScheduledFuture, TimeUnit} - -import kamon.metric.instrument._ -import kamon.util.MeasurementUnit - -import scala.collection.concurrent.TrieMap -import scala.util.Try - -trait EntityRecorder { - def histogram(name: String): Histogram - def histogram(name: String, measurementUnit: MeasurementUnit, dynamicRange: DynamicRange): Histogram - - def minMaxCounter(name: String): MinMaxCounter - def minMaxCounter(name: String, measurementUnit: MeasurementUnit, dynamicRange: DynamicRange, sampleFrequency: Duration): MinMaxCounter - - def gauge(name: String): Gauge - def gauge(name: String, measurementUnit: MeasurementUnit): Gauge - - def counter(name: String): Counter - def counter(name: String, measurementUnit: MeasurementUnit): Counter -} - -trait EntitySnapshotProducer { - def snapshot(): EntitySnapshot -} - -class DefaultEntityRecorder(entity: Entity, instrumentFactory: InstrumentFactory, scheduler: ScheduledExecutorService) - extends EntityRecorder with EntitySnapshotProducer { - - private val histograms = TrieMap.empty[String, Histogram with DistributionSnapshotInstrument] - private val minMaxCounters = TrieMap.empty[String, MinMaxCounterEntry] - private val counters = TrieMap.empty[String, Counter with SingleValueSnapshotInstrument] - private val gauges = TrieMap.empty[String, Gauge with SingleValueSnapshotInstrument] - - def histogram(name: String): Histogram = - histograms.atomicGetOrElseUpdate(name, instrumentFactory.buildHistogram(entity, name)) - - def histogram(name: String, measurementUnit: MeasurementUnit, dynamicRange: DynamicRange): Histogram = - histograms.atomicGetOrElseUpdate(name, instrumentFactory.buildHistogram(entity, name, dynamicRange, measurementUnit)) - - def minMaxCounter(name: String): MinMaxCounter = - minMaxCounters.atomicGetOrElseUpdate(name, - createMMCounterEntry(instrumentFactory.buildMinMaxCounter(entity, name)) - ).mmCounter - - def minMaxCounter(name: String, measurementUnit: MeasurementUnit, dynamicRange: DynamicRange, sampleFrequency: Duration): MinMaxCounter = - minMaxCounters.atomicGetOrElseUpdate(name, - createMMCounterEntry(instrumentFactory.buildMinMaxCounter(entity, name, dynamicRange, sampleFrequency, measurementUnit)) - ).mmCounter - - def gauge(name: String): Gauge = - gauges.atomicGetOrElseUpdate(name, instrumentFactory.buildGauge(entity, name)) - - def gauge(name: String, measurementUnit: MeasurementUnit): Gauge = - gauges.atomicGetOrElseUpdate(name, instrumentFactory.buildGauge(entity, name, measurementUnit)) - - def counter(name: String): Counter = - counters.atomicGetOrElseUpdate(name, instrumentFactory.buildCounter(entity, name)) - - def counter(name: String, measurementUnit: MeasurementUnit): Counter = - 
counters.atomicGetOrElseUpdate(name, instrumentFactory.buildCounter(entity, name, measurementUnit)) - - def snapshot(): EntitySnapshot = - new EntitySnapshot( - entity, - histograms = histograms.values.map(_.snapshot()).toSeq, - minMaxCounters = minMaxCounters.values.map(_.mmCounter.snapshot()).toSeq, - gauges = gauges.values.map(_.snapshot()).toSeq, - counters = counters.values.map(_.snapshot()).toSeq - ) - - def cleanup(): Unit = { - minMaxCounters.values.foreach { mmCounter => - Try(mmCounter.refreshFuture.cancel(true)) - } - } - - private case class MinMaxCounterEntry(mmCounter: MinMaxCounter with DistributionSnapshotInstrument, refreshFuture: ScheduledFuture[_]) - - private def createMMCounterEntry(mmCounter: MinMaxCounter with DistributionSnapshotInstrument): MinMaxCounterEntry = { - val refreshFuture = scheduler.schedule(new Runnable { - override def run(): Unit = mmCounter.sample() - }, mmCounter.sampleInterval.toMillis, TimeUnit.MILLISECONDS) - - MinMaxCounterEntry(mmCounter, refreshFuture) - } -} \ No newline at end of file diff --git a/kamon-core/src/main/scala/kamon/metric/EntitySnapshot.scala b/kamon-core/src/main/scala/kamon/metric/EntitySnapshot.scala deleted file mode 100644 index a7db93eb..00000000 --- a/kamon-core/src/main/scala/kamon/metric/EntitySnapshot.scala +++ /dev/null @@ -1,11 +0,0 @@ -package kamon.metric - -import kamon.metric.instrument.{DistributionSnapshot, SingleValueSnapshot} - -class EntitySnapshot( - val entity: Entity, - val histograms: Seq[DistributionSnapshot], - val minMaxCounters: Seq[DistributionSnapshot], - val gauges: Seq[SingleValueSnapshot], - val counters: Seq[SingleValueSnapshot] -) \ No newline at end of file diff --git a/kamon-core/src/main/scala/kamon/metric/Filter.scala b/kamon-core/src/main/scala/kamon/metric/Filter.scala new file mode 100644 index 00000000..3ad4f7bd --- /dev/null +++ b/kamon-core/src/main/scala/kamon/metric/Filter.scala @@ -0,0 +1,110 @@ +package kamon +package metric + +import java.util.regex.Pattern +import com.typesafe.config.Config + +object Filter { + def fromConfig(config: Config): Filter = { + val filtersConfig = config.getConfig("kamon.metric.filters") + val acceptUnmatched = filtersConfig.getBoolean("accept-unmatched") + + val perMetricFilter = filtersConfig.firstLevelKeys.filter(_ != "accept-unmatched") map { metricName: String ⇒ + val includes = readFilters(filtersConfig, s"$metricName.includes") + val excludes = readFilters(filtersConfig, s"$metricName.excludes") + + (metricName, new IncludeExcludeNameFilter(includes, excludes)) + } toMap + + new Filter(perMetricFilter, acceptUnmatched) + } + + private def readFilters(filtersConfig: Config, name: String): Seq[NameFilter] = { + import scala.collection.JavaConverters._ + if(filtersConfig.hasPath(name)) + filtersConfig.getStringList(name).asScala.map(readNameFilter) + else + Seq.empty + } + + private def readNameFilter(pattern: String): NameFilter = { + if(pattern.startsWith("regex:")) + new RegexNameFilter(pattern.drop(6)) + else if(pattern.startsWith("glob:")) + new GlobPathFilter(pattern.drop(5)) + else + new GlobPathFilter(pattern) + } +} + +class Filter(perMetricFilter: Map[String, NameFilter], acceptUnmatched: Boolean) { + def accept(metricName: String, pattern: String): Boolean = + perMetricFilter + .get(metricName) + .map(_.accept(pattern)) + .getOrElse(acceptUnmatched) +} + +trait NameFilter { + def accept(name: String): Boolean +} + +class IncludeExcludeNameFilter(includes: Seq[NameFilter], excludes: Seq[NameFilter]) extends NameFilter { + override def 
accept(name: String): Boolean =
+    includes.exists(_.accept(name)) && !excludes.exists(_.accept(name))
+}
+
+class RegexNameFilter(pattern: String) extends NameFilter {
+  private val pathRegex = pattern.r
+
+  override def accept(name: String): Boolean = name match {
+    case pathRegex(_*) ⇒ true
+    case _ ⇒ false
+  }
+}
+
+class GlobPathFilter(glob: String) extends NameFilter {
+  private val globPattern = Pattern.compile("(\\*\\*?)|(\\?)|(\\\\.)|(/+)|([^*?]+)")
+  private val compiledPattern = getGlobPattern(glob)
+
+  override def accept(name: String): Boolean =
+    compiledPattern.matcher(name).matches()
+
+  private def getGlobPattern(glob: String) = {
+    val patternBuilder = new StringBuilder
+    val matcher = globPattern.matcher(glob)
+    while (matcher.find()) {
+      val (grp1, grp2, grp3, grp4) = (matcher.group(1), matcher.group(2), matcher.group(3), matcher.group(4))
+      if (grp1 != null) {
+        // match a * or **
+        if (grp1.length == 2) {
+          // it's a **
+          patternBuilder.append(".*")
+        }
+        else {
+          // it's a *
+          patternBuilder.append("[^/]*")
+        }
+      }
+      else if (grp2 != null) {
+        // match a '?' glob pattern; any non-slash character
+        patternBuilder.append("[^/]")
+      }
+      else if (grp3 != null) {
+        // backslash-escaped value
+        patternBuilder.append(Pattern.quote(grp3.substring(1)))
+      }
+      else if (grp4 != null) {
+        // match any number of / chars
+        patternBuilder.append("/+")
+      }
+      else {
+        // some other string
+        patternBuilder.append(Pattern.quote(matcher.group))
+      }
+    }
+
+    Pattern.compile(patternBuilder.toString)
+  }
+}
+
diff --git a/kamon-core/src/main/scala/kamon/metric/RecorderRegistry.scala b/kamon-core/src/main/scala/kamon/metric/RecorderRegistry.scala
deleted file mode 100644
index fd728b1d..00000000
--- a/kamon-core/src/main/scala/kamon/metric/RecorderRegistry.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-package kamon
-package metric
-
-import java.util.concurrent.ScheduledThreadPoolExecutor
-import java.util.concurrent.atomic.AtomicReference
-
-import com.typesafe.config.Config
-import kamon.metric.instrument.InstrumentFactory
-
-import scala.collection.concurrent.TrieMap
-
-
-trait RecorderRegistry {
-  def shouldTrack(entity: Entity): Boolean
-  def getRecorder(entity: Entity): EntityRecorder
-  def removeRecorder(entity: Entity): Boolean
-}
-
-class RecorderRegistryImpl(initialConfig: Config) extends RecorderRegistry {
-  private val scheduler = new ScheduledThreadPoolExecutor(1, numberedThreadFactory("kamon.metric.refresh-scheduler"))
-  private val instrumentFactory = new AtomicReference[InstrumentFactory]()
-  private val entityFilter = new AtomicReference[EntityFilter]()
-  private val entities = TrieMap.empty[Entity, EntityRecorder with EntitySnapshotProducer]
-
-  reconfigure(initialConfig)
-
-
-  override def shouldTrack(entity: Entity): Boolean =
-    entityFilter.get().accept(entity)
-
-  override def getRecorder(entity: Entity): EntityRecorder =
-    entities.atomicGetOrElseUpdate(entity, new DefaultEntityRecorder(entity, instrumentFactory.get(), scheduler))
-
-  override def removeRecorder(entity: Entity): Boolean =
-    entities.remove(entity).nonEmpty
-
-  private[kamon] def reconfigure(config: Config): Unit = synchronized {
-    instrumentFactory.set(InstrumentFactory.fromConfig(config))
-    entityFilter.set(EntityFilter.fromConfig(config))
-
-    val refreshSchedulerPoolSize = config.getInt("kamon.metric.refresh-scheduler-pool-size")
-    scheduler.setCorePoolSize(refreshSchedulerPoolSize)
-  }
-
-  private[kamon] def snapshot(): Seq[EntitySnapshot] = {
-
entities.values.map(_.snapshot()).toSeq - } - - //private[kamon] def diagnosticData -} - -case class RecorderRegistryDiagnostic(entities: Seq[Entity]) - - - - - - - - - diff --git a/kamon-core/src/main/scala/kamon/metric/Registry.scala b/kamon-core/src/main/scala/kamon/metric/Registry.scala new file mode 100644 index 00000000..3f549802 --- /dev/null +++ b/kamon-core/src/main/scala/kamon/metric/Registry.scala @@ -0,0 +1,222 @@ +package kamon +package metric + +import java.time.Duration +import java.util.concurrent.ScheduledThreadPoolExecutor +import java.util.concurrent.atomic.AtomicReference + +import com.typesafe.config.{Config, ConfigFactory} +import com.typesafe.scalalogging.Logger +import kamon.metric.instrument._ +import kamon.util.MeasurementUnit + +import scala.collection.concurrent.TrieMap +/* + + +Kamon.metrics.histogram("http.latency").withMeasurementUnit(Time.Microseconds) + + +Histogram.create("http.latency", Time.Milliseconds) + + + +val histogram = Histogram.builder("http.latency") + .tag("method", "get") + .build() + + +val actorMetrics = MetricGroup("method" -> "get") + + +val actorMetrics = MetricGroup.builder() + .tag("method", "get") + .build() + +actorMetrics.histogram( + +Options for a Histogram: + - MeasurementUnit + - Dynamic Range + +HistogramConfig.forLatency().inMicroseconds() + +Kamon.metrics.histogram("http.latency").withoutTags() +Kamon.metrics.histogram("http.latency").withTag("method", "get") + + + + +Kamon.metrics.histogram("http.latency", Tag.of("color", "blue"), Tag.of("color", "blue")) + +Kamon.histogram(named("http.latency").withTag("path", path)) +Kamon.counter(named("http.latency").withTag("path", path)) + + + + + + + + +val group = Kamon.metrics.group(tags = Map("path" -> "/my-system/user/test-actor")) +val processingTime = group.histogram("processing-time") + + + + def histogram(name: String): Histogram = + histogram(name, MeasurementUnit.none) + + def histogram(name: String, unit: MeasurementUnit): Histogram = + histogram(name, unit, Map.empty) + + def histogram(name: String, unit: MeasurementUnit, tags: Map[String, String]): Histogram = + histogram(name, unit, tags, DynamicRange.Default) + + + + */ + +trait MetricLookup { + + def histogram(name: String): Histogram = + histogram(name, MeasurementUnit.none) + + def histogram(name: String, unit: MeasurementUnit): Histogram = + histogram(name, unit, Map.empty) + + def histogram(name: String, unit: MeasurementUnit, tags: Map[String, String]): Histogram = + histogram(name, unit, tags, None) + + def histogram(name: String, unit: MeasurementUnit, tags: Map[String, String], dynamicRange: DynamicRange): Histogram = + histogram(name, unit, tags, Some(dynamicRange)) + + def histogram(name: String, unit: MeasurementUnit, tags: Map[String, String], dynamicRange: Option[DynamicRange]): Histogram + +} + +class Registry(initialConfig: Config) extends RegistrySnapshotGenerator { + private val logger = Logger(classOf[Registry]) + private val metrics = TrieMap.empty[String, MetricEntry] + private val instrumentFactory = new AtomicReference[InstrumentFactory]() + reconfigure(initialConfig) + + def reconfigure(config: Config): Unit = synchronized { + instrumentFactory.set(InstrumentFactory.fromConfig(config)) + } + + def histogram(name: String, unit: MeasurementUnit, tags: Map[String, String], dynamicRange: Option[DynamicRange]): Histogram = + lookupInstrument(name, unit, tags, InstrumentType.Histogram, instrumentFactory.get().buildHistogram(dynamicRange)) + + def counter(name: String, unit: MeasurementUnit, tags: 
Map[String, String]): Counter = + lookupInstrument(name, unit, tags, InstrumentType.Counter, instrumentFactory.get().buildCounter) + + def gauge(name: String, unit: MeasurementUnit, tags: Map[String, String]): Gauge = + lookupInstrument(name, unit, tags, InstrumentType.Gauge, instrumentFactory.get().buildGauge) + + def minMaxCounter(name: String, unit: MeasurementUnit, tags: Map[String, String], dynamicRange: Option[DynamicRange], sampleInterval: Option[Duration]): MinMaxCounter = + lookupInstrument(name, unit, tags, InstrumentType.MinMaxCounter, instrumentFactory.get().buildMinMaxCounter(dynamicRange, sampleInterval)) + + + override def snapshot(): RegistrySnapshot = synchronized { + var histograms = Seq.empty[DistributionSnapshot] + var mmCounters = Seq.empty[DistributionSnapshot] + var counters = Seq.empty[SingleValueSnapshot] + var gauges = Seq.empty[SingleValueSnapshot] + + for { + metricEntry <- metrics.values + instrument <- metricEntry.instruments.values + } { + metricEntry.instrumentType match { + case InstrumentType.Histogram => histograms = histograms :+ instrument.asInstanceOf[SnapshotableHistogram].snapshot() + case InstrumentType.MinMaxCounter => mmCounters = mmCounters :+ instrument.asInstanceOf[SnapshotableMinMaxCounter].snapshot() + case InstrumentType.Gauge => gauges = gauges :+ instrument.asInstanceOf[SnapshotableGauge].snapshot() + case InstrumentType.Counter => counters = counters :+ instrument.asInstanceOf[SnapshotableCounter].snapshot() + } + } + + RegistrySnapshot(histograms, mmCounters, gauges, counters) + } + + private def lookupInstrument[T](name: String, measurementUnit: MeasurementUnit, tags: Map[String, String], + instrumentType: InstrumentType, builder: (String, Map[String, String], MeasurementUnit) => T): T = { + + val entry = metrics.atomicGetOrElseUpdate(name, MetricEntry(instrumentType, measurementUnit, TrieMap.empty)) + if(entry.instrumentType != instrumentType) + sys.error(s"Tried to use metric [$name] as a [${instrumentType.name}] but it is already defined as [${entry.instrumentType.name}] ") + + if(entry.unit != measurementUnit) + logger.warn("Ignoring attempt to use measurement unit [{}] on metric [name={}, tags={}], the metric uses [{}]", + measurementUnit.magnitude.name, name, tags.prettyPrint(), entry.unit.magnitude.name) + + entry.instruments.getOrElseUpdate(tags, builder(name, tags, measurementUnit)).asInstanceOf[T] + } + + private case class InstrumentType(name: String) + private object InstrumentType { + val Histogram = InstrumentType("Histogram") + val MinMaxCounter = InstrumentType("MinMaxCounter") + val Counter = InstrumentType("Counter") + val Gauge = InstrumentType("Gauge") + } + + private case class MetricEntry(instrumentType: InstrumentType, unit: MeasurementUnit, instruments: TrieMap[Map[String, String], Any]) +} + + + +// +// +//trait RecorderRegistry { +// def shouldTrack(entity: Entity): Boolean +// def getRecorder(entity: Entity): EntityRecorder +// def removeRecorder(entity: Entity): Boolean +//} +// +//class RecorderRegistryImpl(initialConfig: Config) extends RecorderRegistry { +// private val scheduler = new ScheduledThreadPoolExecutor(1, numberedThreadFactory("kamon.metric.refresh-scheduler")) +// private val instrumentFactory = new AtomicReference[InstrumentFactory]() +// private val entityFilter = new AtomicReference[Filter]() +// private val entities = TrieMap.empty[Entity, EntityRecorder with EntitySnapshotProducer] +// +// reconfigure(initialConfig) +// +// +// override def shouldTrack(entity: Entity): Boolean = +// 
entityFilter.get().accept(entity) +// +// override def getRecorder(entity: Entity): EntityRecorder = +// entities.atomicGetOrElseUpdate(entity, new DefaultEntityRecorder(entity, instrumentFactory.get(), scheduler)) +// +// override def removeRecorder(entity: Entity): Boolean = +// entities.remove(entity).nonEmpty +// +// private[kamon] def reconfigure(config: Config): Unit = synchronized { +// instrumentFactory.set(InstrumentFactory.fromConfig(config)) +// entityFilter.set(Filter.fromConfig(config)) +// +// val refreshSchedulerPoolSize = config.getInt("kamon.metric.refresh-scheduler-pool-size") +// scheduler.setCorePoolSize(refreshSchedulerPoolSize) +// } +// +// //private[kamon] def diagnosticData +//} +// +//case class RecorderRegistryDiagnostic(entities: Seq[Entity]) +// + + +object Test extends App { + val registry = new Registry(ConfigFactory.load()) + + println(registry.histogram("test-1", MeasurementUnit.none, Map.empty, Some(DynamicRange.Default)).dynamicRange) + println(registry.histogram("test-2", MeasurementUnit.none, Map.empty, Option(DynamicRange.Fine)).dynamicRange) + + println(Kamon.histogram("my-test")) +} + + + + + diff --git a/kamon-core/src/main/scala/kamon/metric/TickSnapshot.scala b/kamon-core/src/main/scala/kamon/metric/TickSnapshot.scala index f4578965..fe027c91 100644 --- a/kamon-core/src/main/scala/kamon/metric/TickSnapshot.scala +++ b/kamon-core/src/main/scala/kamon/metric/TickSnapshot.scala @@ -2,8 +2,16 @@ package kamon.metric import java.time.Instant -case class TickSnapshot(interval: Interval, entities: Seq[EntitySnapshot]) +import kamon.metric.instrument.{DistributionSnapshot, SingleValueSnapshot} + case class Interval(from: Instant, to: Instant) +case class RegistrySnapshot(histograms: Seq[DistributionSnapshot], minMaxCounters: Seq[DistributionSnapshot], + gauges: Seq[SingleValueSnapshot], counters: Seq[SingleValueSnapshot]) + +case class TickSnapshot(interval: Interval, metrics: RegistrySnapshot) +trait RegistrySnapshotGenerator { + def snapshot(): RegistrySnapshot +} diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/Counter.scala b/kamon-core/src/main/scala/kamon/metric/instrument/Counter.scala index 10b9c3a6..f18e771c 100644 --- a/kamon-core/src/main/scala/kamon/metric/instrument/Counter.scala +++ b/kamon-core/src/main/scala/kamon/metric/instrument/Counter.scala @@ -14,8 +14,8 @@ trait Counter { def increment(times: Long): Unit } -class LongAdderCounter(entity: Entity, name: String, val measurementUnit: MeasurementUnit) - extends Counter with SingleValueSnapshotInstrument with StrictLogging { +class LongAdderCounter(name: String, tags: Map[String, String], val measurementUnit: MeasurementUnit) + extends SnapshotableCounter with StrictLogging { private val adder = new LongAdder() @@ -26,9 +26,9 @@ class LongAdderCounter(entity: Entity, name: String, val measurementUnit: Measur if (times >= 0) adder.add(times) else - logger.warn(s"Ignored attempt to decrement counter [$name] on entity [$entity]") + logger.warn(s"Ignored attempt to decrement counter [$name]") } def snapshot(): SingleValueSnapshot = - SingleValueSnapshot(name, measurementUnit, adder.sumThenReset()) + SingleValueSnapshot(name, tags, measurementUnit, adder.sumThenReset()) } diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/Gauge.scala b/kamon-core/src/main/scala/kamon/metric/instrument/Gauge.scala index 5263d258..acbff912 100644 --- a/kamon-core/src/main/scala/kamon/metric/instrument/Gauge.scala +++ b/kamon-core/src/main/scala/kamon/metric/instrument/Gauge.scala @@ 
-1,8 +1,6 @@ package kamon.metric.instrument import java.util.concurrent.atomic.AtomicLong - -import kamon.metric.Entity import kamon.util.MeasurementUnit trait Gauge { @@ -16,8 +14,8 @@ trait Gauge { } -class AtomicLongGauge(entity: Entity, name: String, val measurementUnit: MeasurementUnit) - extends Gauge with SingleValueSnapshotInstrument { +class AtomicLongGauge(name: String, tags: Map[String, String], val measurementUnit: MeasurementUnit) + extends SnapshotableGauge { private val currentValue = new AtomicLong(0L) @@ -37,5 +35,5 @@ class AtomicLongGauge(entity: Entity, name: String, val measurementUnit: Measure currentValue.set(value) def snapshot(): SingleValueSnapshot = - SingleValueSnapshot(name, measurementUnit, currentValue.get()) + SingleValueSnapshot(name, tags, measurementUnit, currentValue.get()) } diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/Histogram.scala b/kamon-core/src/main/scala/kamon/metric/instrument/Histogram.scala index 76d4ab65..29fe8c69 100644 --- a/kamon-core/src/main/scala/kamon/metric/instrument/Histogram.scala +++ b/kamon-core/src/main/scala/kamon/metric/instrument/Histogram.scala @@ -1,9 +1,10 @@ -package kamon.metric.instrument +package kamon +package metric +package instrument import java.nio.ByteBuffer import com.typesafe.scalalogging.StrictLogging -import kamon.metric.Entity import kamon.util.MeasurementUnit import org.HdrHistogram.{AtomicHistogramExtension, ZigZag} @@ -16,8 +17,8 @@ trait Histogram { } -class HdrHistogram(entity: Entity, name: String, val measurementUnit: MeasurementUnit, val dynamicRange: DynamicRange) - extends AtomicHistogramExtension(dynamicRange) with Histogram with DistributionSnapshotInstrument with StrictLogging { +class HdrHistogram(name: String, tags: Map[String, String], val measurementUnit: MeasurementUnit, val dynamicRange: DynamicRange) + extends AtomicHistogramExtension(dynamicRange) with SnapshotableHistogram with StrictLogging { def record(value: Long): Unit = tryRecord(value, 1) @@ -30,7 +31,7 @@ class HdrHistogram(entity: Entity, name: String, val measurementUnit: Measuremen recordValueWithCount(value, count) } catch { case anyException: Throwable ⇒ - logger.warn(s"Failed to store value [$value] in histogram [$name] of entity [$entity]. You might need to change " + + logger.warn(s"Failed to store value [$value] in histogram [$name]. 
You might need to change " + "your dynamic range configuration for this instrument.", anyException) } } @@ -81,7 +82,7 @@ class HdrHistogram(entity: Entity, name: String, val measurementUnit: Measuremen val distribution = new ZigZagCountsDistribution(totalCount, minIndex, maxIndex, ByteBuffer.wrap(zigZagCounts), protectedUnitMagnitude(), protectedSubBucketHalfCount(), protectedSubBucketHalfCountMagnitude()) - DistributionSnapshot(name, measurementUnit, dynamicRange, distribution) + DistributionSnapshot(name, tags, measurementUnit, dynamicRange, distribution) } private class ZigZagCountsDistribution(val count: Long, minIndex: Int, maxIndex: Int, zigZagCounts: ByteBuffer, diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/InstrumentFactory.scala b/kamon-core/src/main/scala/kamon/metric/instrument/InstrumentFactory.scala index 33a34bdf..0e0536c6 100644 --- a/kamon-core/src/main/scala/kamon/metric/instrument/InstrumentFactory.scala +++ b/kamon-core/src/main/scala/kamon/metric/instrument/InstrumentFactory.scala @@ -9,49 +9,36 @@ import kamon.metric.instrument.InstrumentFactory.CustomInstrumentSettings import kamon.util.MeasurementUnit -private[kamon] class InstrumentFactory private ( - defaultHistogramDynamicRange: DynamicRange, - defaultMMCounterDynamicRange: DynamicRange, - defaultMMCounterSampleRate: Duration, - customSettings: Map[(String, String), CustomInstrumentSettings]) { +private[kamon] class InstrumentFactory private (defaultHistogramDynamicRange: DynamicRange, defaultMMCounterDynamicRange: DynamicRange, + defaultMMCounterSampleInterval: Duration, customSettings: Map[String, CustomInstrumentSettings]) { - def buildHistogram(entity: Entity, name: String, dynamicRange: DynamicRange = defaultHistogramDynamicRange, - measurementUnit: MeasurementUnit = MeasurementUnit.none): Histogram with DistributionSnapshotInstrument = { + println("DEFAULT: " + defaultHistogramDynamicRange) - new HdrHistogram( - entity, - name, - measurementUnit, - instrumentDynamicRange(entity, name, dynamicRange) - ) - } + def buildHistogram(dynamicRange: Option[DynamicRange])(name: String, tags: Map[String, String], unit: MeasurementUnit): SnapshotableHistogram = + new HdrHistogram(name, tags, unit, instrumentDynamicRange(name, dynamicRange.getOrElse(defaultHistogramDynamicRange))) - def buildMinMaxCounter(entity: Entity, name: String, dynamicRange: DynamicRange = defaultMMCounterDynamicRange, - sampleInterval: Duration = defaultMMCounterSampleRate, measurementUnit: MeasurementUnit = MeasurementUnit.none): MinMaxCounter with DistributionSnapshotInstrument = { - - val underlyingHistogram = buildHistogram(entity, name, dynamicRange, measurementUnit) + def buildMinMaxCounter(dynamicRange: Option[DynamicRange], sampleInterval: Option[Duration]) + (name: String, tags: Map[String, String], unit: MeasurementUnit): SnapshotableMinMaxCounter = new PaddedMinMaxCounter( - entity, name, - underlyingHistogram, - instrumentSampleInterval(entity, name, sampleInterval) - ) - } + tags, + buildHistogram(dynamicRange.orElse(Some(defaultMMCounterDynamicRange)))(name, tags, unit), + instrumentSampleInterval(name, sampleInterval.getOrElse(defaultMMCounterSampleInterval)) ) - def buildGauge(entity: Entity, name: String, measurementUnit: MeasurementUnit = MeasurementUnit.none): Gauge with SingleValueSnapshotInstrument = - new AtomicLongGauge(entity, name, measurementUnit) + def buildGauge(name: String, tags: Map[String, String], unit: MeasurementUnit): SnapshotableGauge = + new AtomicLongGauge(name, tags, unit) - def 
buildCounter(entity: Entity, name: String, measurementUnit: MeasurementUnit = MeasurementUnit.none): Counter with SingleValueSnapshotInstrument = - new LongAdderCounter(entity, name, measurementUnit) + def buildCounter(name: String, tags: Map[String, String], unit: MeasurementUnit): SnapshotableCounter = + new LongAdderCounter(name, tags, unit) - private def instrumentDynamicRange(entity: Entity, instrumentName: String, dynamicRange: DynamicRange): DynamicRange = - customSettings.get((entity.category, instrumentName)).fold(dynamicRange) { cs => + private def instrumentDynamicRange(instrumentName: String, dynamicRange: DynamicRange): DynamicRange = + customSettings.get(instrumentName).fold(dynamicRange) { cs => overrideDynamicRange(dynamicRange, cs) } - private def instrumentSampleInterval(entity: Entity, instrumentName: String, sampleInterval: Duration): Duration = - customSettings.get((entity.category, instrumentName)).fold(sampleInterval) { cs => + private def instrumentSampleInterval(instrumentName: String, sampleInterval: Duration): Duration = + customSettings.get(instrumentName).fold(sampleInterval) { cs => cs.sampleInterval.getOrElse(sampleInterval) } @@ -73,21 +60,26 @@ object InstrumentFactory { val customSettings = factoryConfig.getConfig("custom-settings") .configurations - .filter(nonEmptyCategories) - .flatMap(buildCustomInstrumentSettings) + .filter(nonEmptySection) + .map(readCustomInstrumentSettings) new InstrumentFactory(histogramDynamicRange, mmCounterDynamicRange, mmCounterSampleInterval, customSettings) } - private def nonEmptyCategories(entry: (String, Config)): Boolean = entry match { + private def nonEmptySection(entry: (String, Config)): Boolean = entry match { case (_, config) => config.firstLevelKeys.nonEmpty } - private def buildCustomInstrumentSettings(entry: (String, Config)): Map[(String, String), CustomInstrumentSettings] = { - val (category, categoryConfig) = entry - categoryConfig.configurations.map { - case (instrumentName, instrumentConfig) => (category, instrumentName) -> readCustomSettings(instrumentConfig) - } + private def readCustomInstrumentSettings(entry: (String, Config)): (String, CustomInstrumentSettings) = { + val (metricName, metricConfig) = entry + val customSettings = CustomInstrumentSettings( + if (metricConfig.hasPath("lowest-discernible-value")) Some(metricConfig.getLong("lowest-discernible-value")) else None, + if (metricConfig.hasPath("highest-trackable-value")) Some(metricConfig.getLong("highest-trackable-value")) else None, + if (metricConfig.hasPath("significant-value-digits")) Some(metricConfig.getInt("significant-value-digits")) else None, + if (metricConfig.hasPath("sample-interval")) Some(metricConfig.getDuration("sample-interval")) else None + ) + + (metricName -> customSettings) } private def readDynamicRange(config: Config): DynamicRange = @@ -103,12 +95,4 @@ object InstrumentFactory { significantValueDigits: Option[Int], sampleInterval: Option[Duration] ) - - private def readCustomSettings(config: Config): CustomInstrumentSettings = - CustomInstrumentSettings( - if (config.hasPath("lowest-discernible-value")) Some(config.getLong("lowest-discernible-value")) else None, - if (config.hasPath("highest-trackable-value")) Some(config.getLong("highest-trackable-value")) else None, - if (config.hasPath("significant-value-digits")) Some(config.getInt("significant-value-digits")) else None, - if (config.hasPath("sample-interval")) Some(config.getDuration("sample-interval")) else None - ) } \ No newline at end of file diff --git 
a/kamon-core/src/main/scala/kamon/metric/instrument/InstrumentSnapshot.scala b/kamon-core/src/main/scala/kamon/metric/instrument/InstrumentSnapshot.scala index ffb00080..1364c2d8 100644 --- a/kamon-core/src/main/scala/kamon/metric/instrument/InstrumentSnapshot.scala +++ b/kamon-core/src/main/scala/kamon/metric/instrument/InstrumentSnapshot.scala @@ -6,13 +6,14 @@ import kamon.util.MeasurementUnit * Snapshot for instruments that internally track a single value. Meant to be used for counters and gauges. * */ -case class SingleValueSnapshot(name: String, measurementUnit: MeasurementUnit, value: Long) +case class SingleValueSnapshot(name: String, tags: Map[String, String], measurementUnit: MeasurementUnit, value: Long) /** * Snapshot for instruments that internally the distribution of values in a defined dynamic range. Meant to be used * with histograms and min max counters. */ -case class DistributionSnapshot(name: String, measurementUnit: MeasurementUnit, dynamicRange: DynamicRange, distribution: Distribution) +case class DistributionSnapshot(name: String, tags: Map[String, String], measurementUnit: MeasurementUnit, + dynamicRange: DynamicRange, distribution: Distribution) trait Distribution { @@ -48,3 +49,9 @@ trait DistributionSnapshotInstrument { trait SingleValueSnapshotInstrument { private[kamon] def snapshot(): SingleValueSnapshot } + +trait SnapshotableHistogram extends Histogram with DistributionSnapshotInstrument +trait SnapshotableMinMaxCounter extends MinMaxCounter with DistributionSnapshotInstrument +trait SnapshotableCounter extends Counter with SingleValueSnapshotInstrument +trait SnapshotableGauge extends Gauge with SingleValueSnapshotInstrument + diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/MinMaxCounter.scala b/kamon-core/src/main/scala/kamon/metric/instrument/MinMaxCounter.scala index cddd8ed9..70094b7b 100644 --- a/kamon-core/src/main/scala/kamon/metric/instrument/MinMaxCounter.scala +++ b/kamon-core/src/main/scala/kamon/metric/instrument/MinMaxCounter.scala @@ -5,7 +5,6 @@ import java.time.Duration import java.util.concurrent.atomic.{AtomicLong, AtomicReference} import kamon.jsr166.LongMaxUpdater -import kamon.metric.Entity import kamon.util.MeasurementUnit trait MinMaxCounter { @@ -21,8 +20,8 @@ trait MinMaxCounter { } -class PaddedMinMaxCounter(entity: Entity, name: String, underlyingHistogram: Histogram with DistributionSnapshotInstrument, - val sampleInterval: Duration) extends MinMaxCounter with DistributionSnapshotInstrument { +class PaddedMinMaxCounter(name: String, tags: Map[String, String], underlyingHistogram: Histogram with DistributionSnapshotInstrument, + val sampleInterval: Duration) extends SnapshotableMinMaxCounter { private val min = new LongMaxUpdater(0L) private val max = new LongMaxUpdater(0L) diff --git a/kamon-core/src/main/scala/kamon/package.scala b/kamon-core/src/main/scala/kamon/package.scala index b65022bc..64a0a72e 100644 --- a/kamon-core/src/main/scala/kamon/package.scala +++ b/kamon-core/src/main/scala/kamon/package.scala @@ -35,6 +35,10 @@ package object kamon { } } + implicit class PrettyPrintTags(val tags: Map[String, String]) extends AnyVal { + def prettyPrint(): String = + tags.map { case (k, v) => k + "=" + v } mkString("{", ",", "}") + } /** * Workaround to the non thread-safe [[scala.collection.concurrent.TrieMap#getOrElseUpdate()]] method. 
More details on diff --git a/kamon-core/src/main/scala/kamon/trace/Span.scala b/kamon-core/src/main/scala/kamon/trace/Span.scala index 904c0a22..e64d8118 100644 --- a/kamon-core/src/main/scala/kamon/trace/Span.scala +++ b/kamon-core/src/main/scala/kamon/trace/Span.scala @@ -1,11 +1,8 @@ package kamon package trace -import kamon.metric.{Entity, RecorderRegistry} -import kamon.metric.instrument.DynamicRange - import scala.collection.JavaConverters._ -import kamon.util.{Clock, MeasurementUnit} +import kamon.util.Clock object Span { val MetricCategory = "span" @@ -29,7 +26,7 @@ object Span { class Span(spanContext: SpanContext, initialOperationName: String, initialTags: Map[String, String], startTimestampMicros: Long, - recorderRegistry: RecorderRegistry, reporterRegistry: ReporterRegistryImpl) extends io.opentracing.Span { + recorderRegistry: Any, reporterRegistry: ReporterRegistryImpl) extends io.opentracing.Span { private var isOpen: Boolean = true private val sampled: Boolean = spanContext.sampled @@ -156,17 +153,17 @@ class Span(spanContext: SpanContext, initialOperationName: String, initialTags: private def recordSpanMetrics(): Unit = { val elapsedTime = endTimestampMicros - startTimestampMicros - val entity = Entity(operationName, Span.MetricCategory, metricTags) - val recorder = recorderRegistry.getRecorder(entity) - - recorder - .histogram(Span.LatencyMetricName, MeasurementUnit.time.microseconds, DynamicRange.Default) - .record(elapsedTime) - - tags.get("error").foreach { errorTag => - if(errorTag != null && errorTag.equals(Span.BooleanTagTrueValue)) { - recorder.counter(Span.ErrorMetricName).increment() - } - } +// val entity = Entity(operationName, Span.MetricCategory, metricTags) +// val recorder = recorderRegistry.getRecorder(entity) + +// recorder +// .histogram(Span.LatencyMetricName, MeasurementUnit.time.microseconds, DynamicRange.Default) +// .record(elapsedTime) +// +// tags.get("error").foreach { errorTag => +// if(errorTag != null && errorTag.equals(Span.BooleanTagTrueValue)) { +// recorder.counter(Span.ErrorMetricName).increment() +// } +// } } } \ No newline at end of file diff --git a/kamon-core/src/main/scala/kamon/trace/Tracer.scala b/kamon-core/src/main/scala/kamon/trace/Tracer.scala index 6bb5a252..ed42b810 100644 --- a/kamon-core/src/main/scala/kamon/trace/Tracer.scala +++ b/kamon-core/src/main/scala/kamon/trace/Tracer.scala @@ -7,12 +7,11 @@ import io.opentracing.propagation.{TextMap, Format} import io.opentracing.propagation.Format.Builtin.{BINARY, HTTP_HEADERS, TEXT_MAP} import io.opentracing.util.ThreadLocalActiveSpanSource import kamon.ReporterRegistryImpl -import kamon.metric.{Entity, EntityRecorder, RecorderRegistry} import kamon.util.Clock -class Tracer(metrics: RecorderRegistry, reporterRegistry: ReporterRegistryImpl) extends io.opentracing.Tracer { +class Tracer(metrics: Any, reporterRegistry: ReporterRegistryImpl) extends io.opentracing.Tracer { private val logger = Logger(classOf[Tracer]) - private val metricsRecorder = new TracerMetricsRecorder(metrics.getRecorder(Entity("tracer", "tracer", Map.empty))) + ///private val metricsRecorder = new TracerMetricsRecorder(metrics.getRecorder(Entity("tracer", "tracer", Map.empty))) private val activeSpanSource = new ThreadLocalActiveSpanSource() @volatile private var sampler: Sampler = Sampler.never @@ -118,15 +117,12 @@ class Tracer(metrics: RecorderRegistry, reporterRegistry: ReporterRegistryImpl) new SpanContext(traceID, traceID, 0L, sampler.decide(traceID), initialTags) } - 
metricsRecorder.createdSpans.increment() - new Span(spanContext, operationName, initialTags, startTimestampMicros, metrics, reporterRegistry) + //metricsRecorder.createdSpans.increment() + new Span(spanContext, operationName, initialTags, startTimestampMicros, ???, reporterRegistry) } private def createID(): Long = ThreadLocalRandom.current().nextLong() } - private class TracerMetricsRecorder(recorder: EntityRecorder) { - val createdSpans = recorder.counter("created-spans") - } } -- cgit v1.2.3
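
A minimal usage sketch of the flattened API introduced by this change, based on the Registry and MetricLookup signatures added above and mirroring the Test object at the end of Registry.scala. The metric names, tag values, and the FlattenedApiExample object are made up for illustration only, and the sketch assumes kamon-core's reference configuration is on the classpath.

import com.typesafe.config.ConfigFactory
import kamon.metric.Registry
import kamon.metric.instrument.DynamicRange
import kamon.util.MeasurementUnit

object FlattenedApiExample extends App {
  // Instruments are looked up by metric name plus a tag map; there is no Entity in between anymore.
  val registry = new Registry(ConfigFactory.load())

  // The same metric name with different tag sets yields separate instruments under one MetricEntry.
  val getLatency = registry.histogram("http.latency", MeasurementUnit.time.microseconds,
    Map("method" -> "GET"), Some(DynamicRange.Default))
  val postLatency = registry.histogram("http.latency", MeasurementUnit.time.microseconds,
    Map("method" -> "POST"), None)

  getLatency.record(42)
  postLatency.record(57)

  // Counters follow the same name + unit + tags pattern.
  registry.counter("http.server.errors", MeasurementUnit.none, Map("method" -> "GET")).increment()

  // A snapshot is now a flat collection of instrument snapshots instead of per-entity snapshots.
  val snapshot = registry.snapshot()
  snapshot.histograms.foreach(h => println(s"${h.name} ${h.tags}"))
}

The same calls are available through the Kamon companion object via MetricLookup once the remaining ??? placeholders in Kamon.scala are wired to the new Registry.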