path: root/kamon-core/src/legacy-test
Diffstat (limited to 'kamon-core/src/legacy-test')
-rw-r--r--  kamon-core/src/legacy-test/resources/application.conf  26
-rw-r--r--  kamon-core/src/legacy-test/resources/logback.xml  12
-rw-r--r--  kamon-core/src/legacy-test/scala/kamon/KamonLifecycleSpec.scala  93
-rw-r--r--  kamon-core/src/legacy-test/scala/kamon/metric/MetricScaleDecoratorSpec.scala  102
-rw-r--r--  kamon-core/src/legacy-test/scala/kamon/metric/SimpleMetricsSpec.scala  106
-rw-r--r--  kamon-core/src/legacy-test/scala/kamon/metric/SubscriptionsProtocolSpec.scala  150
-rw-r--r--  kamon-core/src/legacy-test/scala/kamon/metric/TickMetricSnapshotBufferSpec.scala  97
-rw-r--r--  kamon-core/src/legacy-test/scala/kamon/metric/TraceMetricsSpec.scala  121
-rw-r--r--  kamon-core/src/legacy-test/scala/kamon/metric/instrument/CounterSpec.scala  79
-rw-r--r--  kamon-core/src/legacy-test/scala/kamon/metric/instrument/GaugeSpec.scala  79
-rw-r--r--  kamon-core/src/legacy-test/scala/kamon/metric/instrument/HistogramSpec.scala  157
-rw-r--r--  kamon-core/src/legacy-test/scala/kamon/metric/instrument/MinMaxCounterSpec.scala  140
-rw-r--r--  kamon-core/src/legacy-test/scala/kamon/metric/instrument/UnitOfMeasurementSpec.scala  98
-rw-r--r--  kamon-core/src/legacy-test/scala/kamon/testkit/BaseKamonSpec.scala  70
-rw-r--r--  kamon-core/src/legacy-test/scala/kamon/trace/SamplerSpec.scala  76
-rw-r--r--  kamon-core/src/legacy-test/scala/kamon/trace/SimpleTraceSpec.scala  167
-rw-r--r--  kamon-core/src/legacy-test/scala/kamon/trace/TraceContextManipulationSpec.scala  113
-rw-r--r--  kamon-core/src/legacy-test/scala/kamon/trace/TraceLocalSpec.scala  108
-rw-r--r--  kamon-core/src/legacy-test/scala/kamon/trace/logging/MdcKeysSupportSpec.scala  44
-rw-r--r--  kamon-core/src/legacy-test/scala/kamon/util/GlobPathFilterSpec.scala  73
-rw-r--r--  kamon-core/src/legacy-test/scala/kamon/util/NeedToScaleSpec.scala  67
-rw-r--r--  kamon-core/src/legacy-test/scala/kamon/util/RegexPathFilterSpec.scala  59
-rw-r--r--  kamon-core/src/legacy-test/scala/kamon/util/executors/ExecutorServiceMetricsSpec.scala  75
23 files changed, 2112 insertions, 0 deletions
diff --git a/kamon-core/src/legacy-test/resources/application.conf b/kamon-core/src/legacy-test/resources/application.conf
new file mode 100644
index 00000000..bf6123d9
--- /dev/null
+++ b/kamon-core/src/legacy-test/resources/application.conf
@@ -0,0 +1,26 @@
+akka {
+ loggers = ["akka.event.slf4j.Slf4jLogger"]
+}
+
+kamon {
+ metric {
+ tick-interval = 1 hour
+ default-collection-context-buffer-size = 100
+ }
+
+ trace {
+ level-of-detail = simple-trace
+ sampling = all
+ }
+
+ default-tags = {
+ name = "jason"
+ number = 42
+ username = ${USER}
+ list = [1, 2, 3] // lists do not make sense for a tag
+ object = {
+ nested-bool = true
+ nested-string = "a string"
+ }
+ }
+} \ No newline at end of file
diff --git a/kamon-core/src/legacy-test/resources/logback.xml b/kamon-core/src/legacy-test/resources/logback.xml
new file mode 100644
index 00000000..b467456f
--- /dev/null
+++ b/kamon-core/src/legacy-test/resources/logback.xml
@@ -0,0 +1,12 @@
+<configuration>
+ <statusListener class="ch.qos.logback.core.status.NopStatusListener"/>
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <encoder>
+ <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
+ </encoder>
+ </appender>
+
+ <root level="OFF">
+ <appender-ref ref="STDOUT"/>
+ </root>
+</configuration> \ No newline at end of file
diff --git a/kamon-core/src/legacy-test/scala/kamon/KamonLifecycleSpec.scala b/kamon-core/src/legacy-test/scala/kamon/KamonLifecycleSpec.scala
new file mode 100644
index 00000000..2fbd1b71
--- /dev/null
+++ b/kamon-core/src/legacy-test/scala/kamon/KamonLifecycleSpec.scala
@@ -0,0 +1,93 @@
+package kamon
+
+import akka.actor.ActorSystem
+import akka.testkit.{TestKitBase, TestProbe}
+import com.typesafe.config.ConfigFactory
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
+import kamon.metric.{EntitySnapshot, SubscriptionsDispatcher}
+import kamon.util.LazyActorRef
+import org.scalatest.{Matchers, WordSpecLike}
+import org.scalactic.TimesOnInt._
+
+import scala.concurrent.duration._
+
+class KamonLifecycleSpec extends TestKitBase with WordSpecLike with Matchers {
+ override implicit lazy val system: ActorSystem = ActorSystem("kamon-lifecycle-spec")
+
+ "The Kamon lifecycle" should {
+ "allow Kamon to be used before it gets started" in {
+ val someMetric = Kamon.metrics.histogram("allow-me-before-start")
+ }
+
+ "allow Kamon to be started/shutdown several times" in {
+ 10 times {
+ Kamon.shutdown()
+ Kamon.start()
+ Kamon.start()
+ Kamon.shutdown()
+ Kamon.shutdown()
+ }
+ }
+
+ "not dispatch subscriptions before Kamon startup" in {
+ val subscriber = TestProbe()
+ Kamon.metrics.histogram("only-after-startup").record(100)
+ Kamon.metrics.subscribe("**", "**", subscriber.ref, permanently = true)
+
+ flushSubscriptions()
+ subscriber.expectNoMsg(300 millis)
+
+ Kamon.metrics.histogram("only-after-startup").record(100)
+ Kamon.start()
+ flushSubscriptions()
+ subscriber.expectMsgType[TickMetricSnapshot]
+ Kamon.shutdown()
+ }
+
+ "not dispatch subscriptions after Kamon shutdown" in {
+ val subscriber = TestProbe()
+ Kamon.start()
+ Kamon.metrics.histogram("only-before-shutdown").record(100)
+ Kamon.metrics.subscribe("**", "**", subscriber.ref, permanently = true)
+
+ flushSubscriptions()
+ subscriber.expectMsgType[TickMetricSnapshot]
+
+ Kamon.metrics.histogram("only-before-shutdown").record(100)
+ Kamon.shutdown()
+ Thread.sleep(500)
+ flushSubscriptions()
+ subscriber.expectNoMsg(300 millis)
+ }
+
+ "reconfigure filters after being started" in {
+ val customConfig = ConfigFactory.parseString(
+ """
+ |kamon.metric.filters.histogram {
+ | includes = [ "**" ]
+ | excludes = ["untracked-histogram"]
+ |}
+ """.stripMargin
+ )
+
+ Kamon.metrics.shouldTrack("untracked-histogram", "histogram") shouldBe true
+ Kamon.start(customConfig.withFallback(ConfigFactory.load()))
+ Kamon.metrics.shouldTrack("untracked-histogram", "histogram") shouldBe false
+
+ }
+ }
+
+ def takeSnapshotOf(name: String, category: String): EntitySnapshot = {
+ val collectionContext = Kamon.metrics.buildDefaultCollectionContext
+ val recorder = Kamon.metrics.find(name, category).get
+ recorder.collect(collectionContext)
+ }
+
+ def flushSubscriptions(): Unit = {
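+ // Reach the private _subscriptions LazyActorRef via reflection and send it a Tick,
+ // so subscribers receive a snapshot immediately instead of waiting for the configured tick-interval.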
+ val subscriptionsField = Kamon.metrics.getClass.getDeclaredField("_subscriptions")
+ subscriptionsField.setAccessible(true)
+ val subscriptions = subscriptionsField.get(Kamon.metrics).asInstanceOf[LazyActorRef]
+
+ subscriptions.tell(SubscriptionsDispatcher.Tick)
+ }
+}
diff --git a/kamon-core/src/legacy-test/scala/kamon/metric/MetricScaleDecoratorSpec.scala b/kamon-core/src/legacy-test/scala/kamon/metric/MetricScaleDecoratorSpec.scala
new file mode 100644
index 00000000..d3d0f4fb
--- /dev/null
+++ b/kamon-core/src/legacy-test/scala/kamon/metric/MetricScaleDecoratorSpec.scala
@@ -0,0 +1,102 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+import kamon.Kamon
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
+import kamon.metric.instrument.{InstrumentFactory, Memory, Time, UnitOfMeasurement}
+import kamon.testkit.BaseKamonSpec
+import kamon.util.MilliTimestamp
+import org.scalatest.OptionValues._
+
+class MetricScaleDecoratorSpec extends BaseKamonSpec("metrics-scale-decorator-spec") with SnapshotFixtures {
+ "the MetricScaleDecorator" when {
+ "receives a snapshot" which {
+
+ val scaleDecorator = system.actorOf(MetricScaleDecorator.props(
+ Some(Time.Milliseconds), Some(Memory.KiloBytes), testActor
+ ))
+ "is empty" should {
+ "do nothing for empty snapshots" in {
+ scaleDecorator ! emptySnapshot
+ expectMsg(emptySnapshot)
+ }
+ }
+ "is non empty" should {
+ scaleDecorator ! nonEmptySnapshot
+ val scaled = expectMsgType[TickMetricSnapshot]
+ val snapshot = scaled.metrics(testEntity)
+
+ "scale time metrics" in {
+ snapshot.histogram("nano-time").value.max should be(10L +- 1L)
+ snapshot.counter("micro-time").value.count should be(1000L)
+ }
+ "scale memory metrics" in {
+ snapshot.histogram("byte-memory").value.max should be(1)
+ snapshot.counter("kbyte-memory").value.count should be(100L)
+ }
+ "do nothing with unknown metrics" in {
+ snapshot.histogram("unknown-histogram").value.max should be(1000L)
+ snapshot.counter("unknown-counter").value.count should be(10L)
+ }
+ "not change from and to" in {
+ scaled.from.millis should be(1000)
+ scaled.to.millis should be(2000)
+ }
+ }
+ }
+ }
+}
+
+trait SnapshotFixtures {
+ self: BaseKamonSpec ⇒
+
+ class ScaleDecoratorTestMetrics(instrumentFactory: InstrumentFactory)
+ extends GenericEntityRecorder(instrumentFactory) {
+ val nanoTime = histogram("nano-time", Time.Nanoseconds)
+ val microTime = counter("micro-time", Time.Microseconds)
+ val byteMemory = histogram("byte-memory", Memory.Bytes)
+ val kbyteMemory = counter("kbyte-memory", Memory.KiloBytes)
+ val unknownHistogram = histogram("unknown-histogram", UnitOfMeasurement.Unknown)
+ val unknownCounter = counter("unknown-counter", UnitOfMeasurement.Unknown)
+ }
+
+ object ScaleDecoratorTestMetrics extends EntityRecorderFactory[ScaleDecoratorTestMetrics] {
+ override def category: String = "decorator-spec"
+
+ override def createRecorder(instrumentFactory: InstrumentFactory): ScaleDecoratorTestMetrics =
+ new ScaleDecoratorTestMetrics(instrumentFactory)
+ }
+
+ val testEntity = Entity("metrics-scale-decorator-spec", "decorator-spec")
+ val recorder = Kamon.metrics.entity(ScaleDecoratorTestMetrics, "metrics-scale-decorator-spec")
+
+ val emptySnapshot = TickMetricSnapshot(new MilliTimestamp(1000), new MilliTimestamp(2000), Map.empty)
+
+ recorder.unknownCounter.increment(10)
+ recorder.unknownHistogram.record(1000L)
+ recorder.nanoTime.record(10000000L)
+ recorder.microTime.increment(1000000L)
+ recorder.byteMemory.record(1024L)
+ recorder.kbyteMemory.increment(100L)
+
+ val nonEmptySnapshot = TickMetricSnapshot(new MilliTimestamp(1000), new MilliTimestamp(2000), Map(
+ (testEntity → recorder.collect(collectionContext))
+ ))
+
+}
+
diff --git a/kamon-core/src/legacy-test/scala/kamon/metric/SimpleMetricsSpec.scala b/kamon-core/src/legacy-test/scala/kamon/metric/SimpleMetricsSpec.scala
new file mode 100644
index 00000000..6d753888
--- /dev/null
+++ b/kamon-core/src/legacy-test/scala/kamon/metric/SimpleMetricsSpec.scala
@@ -0,0 +1,106 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+import kamon.Kamon
+import kamon.metric.instrument.Histogram.DynamicRange
+import kamon.testkit.BaseKamonSpec
+import scala.concurrent.duration._
+
+class SimpleMetricsSpec extends BaseKamonSpec("simple-metrics-spec") {
+
+ "the SimpleMetrics extension" should {
+
+ "allow registering a fully configured Histogram and get the same Histogram if registering again" in {
+ val histogramA = Kamon.metrics.histogram("histogram-with-settings", DynamicRange(1, 10000, 2))
+ val histogramB = Kamon.metrics.histogram("histogram-with-settings", DynamicRange(1, 10000, 2))
+
+ histogramA shouldBe theSameInstanceAs(histogramB)
+ }
+
+ "return the original Histogram when registering a fully configured Histogram for second time but with different settings" in {
+ val histogramA = Kamon.metrics.histogram("histogram-with-settings", DynamicRange(1, 10000, 2))
+ val histogramB = Kamon.metrics.histogram("histogram-with-settings", DynamicRange(1, 50000, 2))
+
+ histogramA shouldBe theSameInstanceAs(histogramB)
+ }
+
+ "allow registering a Histogram that takes the default configuration from the kamon.metrics.precision settings" in {
+ Kamon.metrics.histogram("histogram-with-default-configuration")
+ }
+
+ "allow registering a Counter and get the same Counter if registering again" in {
+ val counterA = Kamon.metrics.counter("counter")
+ val counterB = Kamon.metrics.counter("counter")
+
+ counterA shouldBe theSameInstanceAs(counterB)
+ }
+
+ "allow registering a fully configured MinMaxCounter and get the same MinMaxCounter if registering again" in {
+ val minMaxCounterA = Kamon.metrics.minMaxCounter("min-max-counter-with-settings", DynamicRange(1, 10000, 2), 1 second)
+ val minMaxCounterB = Kamon.metrics.minMaxCounter("min-max-counter-with-settings", DynamicRange(1, 10000, 2), 1 second)
+
+ minMaxCounterA shouldBe theSameInstanceAs(minMaxCounterB)
+ }
+
+ "return the original MinMaxCounter when registering a fully configured MinMaxCounter for second time but with different settings" in {
+ val minMaxCounterA = Kamon.metrics.minMaxCounter("min-max-counter-with-settings", DynamicRange(1, 10000, 2), 1 second)
+ val minMaxCounterB = Kamon.metrics.minMaxCounter("min-max-counter-with-settings", DynamicRange(1, 50000, 2), 1 second)
+
+ minMaxCounterA shouldBe theSameInstanceAs(minMaxCounterB)
+ }
+
+ "allow registering a MinMaxCounter that takes the default configuration from the kamon.metrics.precision settings" in {
+ Kamon.metrics.minMaxCounter("min-max-counter-with-default-configuration")
+ }
+
+ "allow registering a fully configured Gauge and get the same Gauge if registering again" in {
+ val gaugeA = Kamon.metrics.gauge("gauge-with-settings", DynamicRange(1, 10000, 2), 1 second)(1L)
+ val gaugeB = Kamon.metrics.gauge("gauge-with-settings", DynamicRange(1, 10000, 2), 1 second)(1L)
+
+ gaugeA shouldBe theSameInstanceAs(gaugeB)
+ }
+
+ "return the original Gauge when registering a fully configured Gauge for second time but with different settings" in {
+ val gaugeA = Kamon.metrics.gauge("gauge-with-settings", DynamicRange(1, 10000, 2), 1 second)(1L)
+ val gaugeB = Kamon.metrics.gauge("gauge-with-settings", DynamicRange(1, 10000, 2), 1 second)(1L)
+
+ gaugeA shouldBe theSameInstanceAs(gaugeB)
+ }
+
+ "allow registering a Gauge that takes the default configuration from the kamon.metrics.precision settings" in {
+ Kamon.metrics.gauge("gauge-with-default-configuration")(2L)
+ }
+
+ "allow un-registering user metrics" in {
+ val counter = Kamon.metrics.counter("counter-for-remove")
+ val histogram = Kamon.metrics.histogram("histogram-for-remove")
+ val minMaxCounter = Kamon.metrics.minMaxCounter("min-max-counter-for-remove")
+ val gauge = Kamon.metrics.gauge("gauge-for-remove")(2L)
+
+ Kamon.metrics.removeCounter("counter-for-remove")
+ Kamon.metrics.removeHistogram("histogram-for-remove")
+ Kamon.metrics.removeMinMaxCounter("min-max-counter-for-remove")
+ Kamon.metrics.removeGauge("gauge-for-remove")
+
+ counter should not be (theSameInstanceAs(Kamon.metrics.counter("counter-for-remove")))
+ histogram should not be (theSameInstanceAs(Kamon.metrics.histogram("histogram-for-remove")))
+ minMaxCounter should not be (theSameInstanceAs(Kamon.metrics.minMaxCounter("min-max-counter-for-remove")))
+ gauge should not be (theSameInstanceAs(Kamon.metrics.gauge("gauge-for-remove")(2L)))
+ }
+ }
+}
diff --git a/kamon-core/src/legacy-test/scala/kamon/metric/SubscriptionsProtocolSpec.scala b/kamon-core/src/legacy-test/scala/kamon/metric/SubscriptionsProtocolSpec.scala
new file mode 100644
index 00000000..ee34acc7
--- /dev/null
+++ b/kamon-core/src/legacy-test/scala/kamon/metric/SubscriptionsProtocolSpec.scala
@@ -0,0 +1,150 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+import akka.actor._
+import akka.testkit.{TestProbe, ImplicitSender}
+import com.typesafe.config.ConfigFactory
+import kamon.Kamon
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
+import kamon.testkit.BaseKamonSpec
+import scala.concurrent.duration._
+
+class SubscriptionsProtocolSpec extends BaseKamonSpec("subscriptions-protocol-spec") with ImplicitSender {
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon.metric {
+ | tick-interval = 1 hour
+ |}
+ """.stripMargin
+ )
+
+ lazy val metricsModule = Kamon.metrics
+ import metricsModule.{entity, subscribe, unsubscribe}
+ val defaultTags: Map[String, String] = Kamon.metrics.defaultTags
+
+ "the Subscriptions messaging protocol" should {
+ "allow subscribing for a single tick" in {
+ val subscriber = TestProbe()
+ entity(TraceMetrics, "one-shot")
+ subscribe("trace", "one-shot", subscriber.ref, permanently = false)
+
+ flushSubscriptions()
+ val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
+
+ tickSnapshot.metrics.size should be(1)
+ tickSnapshot.metrics.keys should contain(Entity("one-shot", "trace", defaultTags))
+
+ flushSubscriptions()
+ subscriber.expectNoMsg(1 second)
+ }
+
+ "subscriptions should include default tags" in {
+ val subscriber = TestProbe()
+
+ Kamon.metrics.histogram("histogram-with-tags").record(1)
+ Kamon.metrics.subscribe("**", "**", subscriber.ref, permanently = true)
+ flushSubscriptions()
+
+ val tickSubscription = subscriber.expectMsgType[TickMetricSnapshot]
+ tickSubscription.metrics.head._1.tags.get("name") shouldBe Some("jason")
+ tickSubscription.metrics.head._1.tags.get("number") shouldBe Some("42")
+ tickSubscription.metrics.head._1.tags.get("username").isDefined shouldBe true
+ tickSubscription.metrics.head._1.tags.get("object.nested-bool") shouldBe Some("true")
+ tickSubscription.metrics.head._1.tags.get("object.nested-string") shouldBe Some("a string")
+ tickSubscription.metrics.head._1.tags.get("list") shouldBe None
+ }
+
+ "allow subscribing permanently to a metric" in {
+ val subscriber = TestProbe()
+ entity(TraceMetrics, "permanent")
+ subscribe("trace", "permanent", subscriber.ref, permanently = true)
+
+ for (repetition ← 1 to 5) {
+ flushSubscriptions()
+ val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
+
+ tickSnapshot.metrics.size should be(1)
+ tickSnapshot.metrics.keys should contain(Entity("permanent", "trace", defaultTags))
+ }
+ }
+
+ "allow subscribing to metrics matching a glob pattern" in {
+ val subscriber = TestProbe()
+ entity(TraceMetrics, "include-one")
+ entity(TraceMetrics, "exclude-two")
+ entity(TraceMetrics, "include-three")
+ subscribe("trace", "include-*", subscriber.ref, permanently = true)
+
+ for (repetition ← 1 to 5) {
+ flushSubscriptions()
+ val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
+
+ tickSnapshot.metrics.size should be(2)
+ tickSnapshot.metrics.keys should contain(Entity("include-one", "trace", defaultTags))
+ tickSnapshot.metrics.keys should contain(Entity("include-three", "trace", defaultTags))
+ }
+ }
+
+ "send a single TickMetricSnapshot to each subscriber, even if subscribed multiple times" in {
+ val subscriber = TestProbe()
+ entity(TraceMetrics, "include-one")
+ entity(TraceMetrics, "exclude-two")
+ entity(TraceMetrics, "include-three")
+ subscribe("trace", "include-one", subscriber.ref, permanently = true)
+ subscribe("trace", "include-three", subscriber.ref, permanently = true)
+
+ for (repetition ← 1 to 5) {
+ flushSubscriptions()
+ val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
+
+ tickSnapshot.metrics.size should be(2)
+ tickSnapshot.metrics.keys should contain(Entity("include-one", "trace", defaultTags))
+ tickSnapshot.metrics.keys should contain(Entity("include-three", "trace", defaultTags))
+ }
+ }
+
+ "allow un-subscribing a subscriber" in {
+ val subscriber = TestProbe()
+ entity(TraceMetrics, "one-shot")
+ subscribe("trace", "one-shot", subscriber.ref, permanently = true)
+
+ flushSubscriptions()
+ val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
+ tickSnapshot.metrics.size should be(1)
+ tickSnapshot.metrics.keys should contain(Entity("one-shot", "trace", defaultTags))
+
+ unsubscribe(subscriber.ref)
+
+ flushSubscriptions()
+ subscriber.expectNoMsg(1 second)
+ }
+ }
+
+ def subscriptionsActor: ActorRef = {
+ val listener = TestProbe()
+ system.actorSelection("/user/kamon/kamon-metrics").tell(Identify(1), listener.ref)
+ listener.expectMsgType[ActorIdentity].ref.get
+ }
+}
+
+class ForwarderSubscriber(target: ActorRef) extends Actor {
+ def receive = {
+ case anything ⇒ target.forward(anything)
+ }
+}
diff --git a/kamon-core/src/legacy-test/scala/kamon/metric/TickMetricSnapshotBufferSpec.scala b/kamon-core/src/legacy-test/scala/kamon/metric/TickMetricSnapshotBufferSpec.scala
new file mode 100644
index 00000000..6172a9e0
--- /dev/null
+++ b/kamon-core/src/legacy-test/scala/kamon/metric/TickMetricSnapshotBufferSpec.scala
@@ -0,0 +1,97 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+import com.typesafe.config.ConfigFactory
+import kamon.Kamon
+import kamon.metric.instrument.Histogram.MutableRecord
+import kamon.testkit.BaseKamonSpec
+import kamon.util.MilliTimestamp
+import akka.testkit.ImplicitSender
+import scala.concurrent.duration._
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
+
+class TickMetricSnapshotBufferSpec extends BaseKamonSpec("trace-metrics-spec") with ImplicitSender {
+
+ "the TickMetricSnapshotBuffer" should {
+ "merge TickMetricSnapshots received until the flush timeout is reached and fix the from/to fields" in new SnapshotFixtures {
+ val buffer = system.actorOf(TickMetricSnapshotBuffer.props(3 seconds, testActor))
+
+ buffer ! firstEmpty
+ buffer ! secondEmpty
+ buffer ! thirdEmpty
+
+ within(2 seconds)(expectNoMsg())
+ val mergedSnapshot = expectMsgType[TickMetricSnapshot]
+
+ mergedSnapshot.from.millis should equal(1000)
+ mergedSnapshot.to.millis should equal(4000)
+ mergedSnapshot.metrics should be('empty)
+ }
+
+ "merge empty and non-empty snapshots" in new SnapshotFixtures {
+ val buffer = system.actorOf(TickMetricSnapshotBuffer.props(3 seconds, testActor))
+
+ buffer ! firstNonEmpty
+ buffer ! secondNonEmpty
+ buffer ! thirdEmpty
+
+ within(2 seconds)(expectNoMsg())
+ val mergedSnapshot = expectMsgType[TickMetricSnapshot]
+
+ mergedSnapshot.from.millis should equal(1000)
+ mergedSnapshot.to.millis should equal(4000)
+ mergedSnapshot.metrics should not be ('empty)
+
+ val testMetricSnapshot = mergedSnapshot.metrics(testTraceIdentity).histogram("elapsed-time").get
+ testMetricSnapshot.min should equal(10)
+ testMetricSnapshot.max should equal(300)
+ testMetricSnapshot.numberOfMeasurements should equal(6)
+ testMetricSnapshot.recordsIterator.toStream should contain allOf (
+ MutableRecord(10, 3),
+ MutableRecord(20, 1),
+ MutableRecord(30, 1),
+ MutableRecord(300, 1)
+ )
+
+ }
+ }
+
+ trait SnapshotFixtures {
+ val collectionContext = Kamon.metrics.buildDefaultCollectionContext
+ val testTraceIdentity = Entity("buffer-spec-test-trace", "trace")
+ val traceRecorder = Kamon.metrics.entity(TraceMetrics, "buffer-spec-test-trace")
+
+ val firstEmpty = TickMetricSnapshot(new MilliTimestamp(1000), new MilliTimestamp(2000), Map.empty)
+ val secondEmpty = TickMetricSnapshot(new MilliTimestamp(2000), new MilliTimestamp(3000), Map.empty)
+ val thirdEmpty = TickMetricSnapshot(new MilliTimestamp(3000), new MilliTimestamp(4000), Map.empty)
+
+ traceRecorder.elapsedTime.record(10L)
+ traceRecorder.elapsedTime.record(20L)
+ traceRecorder.elapsedTime.record(30L)
+ val firstNonEmpty = TickMetricSnapshot(new MilliTimestamp(1000), new MilliTimestamp(2000), Map(
+ (testTraceIdentity → traceRecorder.collect(collectionContext))
+ ))
+
+ traceRecorder.elapsedTime.record(10L)
+ traceRecorder.elapsedTime.record(10L)
+ traceRecorder.elapsedTime.record(300L)
+ val secondNonEmpty = TickMetricSnapshot(new MilliTimestamp(1000), new MilliTimestamp(2000), Map(
+ (testTraceIdentity → traceRecorder.collect(collectionContext))
+ ))
+ }
+}
diff --git a/kamon-core/src/legacy-test/scala/kamon/metric/TraceMetricsSpec.scala b/kamon-core/src/legacy-test/scala/kamon/metric/TraceMetricsSpec.scala
new file mode 100644
index 00000000..4c44dc07
--- /dev/null
+++ b/kamon-core/src/legacy-test/scala/kamon/metric/TraceMetricsSpec.scala
@@ -0,0 +1,121 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2016 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+import akka.testkit.ImplicitSender
+import com.typesafe.config.ConfigFactory
+import kamon.testkit.BaseKamonSpec
+import kamon.trace.Tracer
+import kamon.metric.instrument.Histogram
+
+import scala.util.control.NoStackTrace
+
+class TraceMetricsSpec extends BaseKamonSpec("trace-metrics-spec") with ImplicitSender {
+
+ "the TraceMetrics" should {
+ "record the elapsed time between a trace creation and finish" in {
+ for (repetitions ← 1 to 10) {
+ Tracer.withContext(newContext("record-elapsed-time")) {
+ Tracer.currentContext.finish()
+ }
+ }
+
+ val snapshot = takeSnapshotOf("record-elapsed-time", "trace")
+ snapshot.histogram("elapsed-time").get.numberOfMeasurements should be(10)
+ }
+
+ "record the elapsed time for segments that occur inside a given trace" in {
+ Tracer.withContext(newContext("trace-with-segments")) {
+ val segment = Tracer.currentContext.startSegment("test-segment", "test-category", "test-library")
+ segment.finish()
+ Tracer.currentContext.finish()
+ }
+
+ val snapshot = takeSnapshotOf("test-segment", "trace-segment",
+ tags = Map(
+ "trace" → "trace-with-segments",
+ "category" → "test-category",
+ "library" → "test-library"
+ ))
+
+ snapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
+ }
+
+ "record the elapsed time for segments that finish after their correspondent trace has finished" in {
+ val segment = Tracer.withContext(newContext("closing-segment-after-trace")) {
+ val s = Tracer.currentContext.startSegment("test-segment", "test-category", "test-library")
+ Tracer.currentContext.finish()
+ s
+ }
+
+ val beforeFinishSegmentSnapshot = takeSnapshotOf("closing-segment-after-trace", "trace")
+ beforeFinishSegmentSnapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
+
+ intercept[NoSuchElementException] {
+ // The segment metric should not exist before the segment has finished.
+
+ takeSnapshotOf("test-segment", "trace-segment",
+ tags = Map(
+ "trace" → "closing-segment-after-trace",
+ "category" → "test-category",
+ "library" → "test-library"
+ ))
+ }
+
+ segment.finish()
+
+ val afterFinishSegmentSnapshot = takeSnapshotOf("test-segment", "trace-segment",
+ tags = Map(
+ "trace" → "closing-segment-after-trace",
+ "category" → "test-category",
+ "library" → "test-library"
+ ))
+
+ afterFinishSegmentSnapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
+ }
+
+ "record the elapsed time between a trace creation and finish with an error" in {
+ for (repetitions ← 1 to 10) {
+ Tracer.withContext(newContext("record-elapsed-time-with-error")) {
+ Tracer.currentContext.finishWithError(new RuntimeException("awesome-trace-error") with NoStackTrace)
+ }
+ }
+
+ val snapshot = takeSnapshotOf("record-elapsed-time-with-error", "trace")
+ snapshot.histogram("elapsed-time").get.numberOfMeasurements should be(10)
+ snapshot.counter("errors").get.count should be(10)
+ }
+
+ "record the elapsed time for segments that finish with an error and that occur inside a given trace" in {
+ Tracer.withContext(newContext("trace-with-segments")) {
+ val segment = Tracer.currentContext.startSegment("test-segment-with-error", "test-category", "test-library")
+ segment.finishWithError(new RuntimeException("awesome-segment-error") with NoStackTrace)
+ Tracer.currentContext.finish()
+ }
+
+ val snapshot = takeSnapshotOf("test-segment-with-error", "trace-segment",
+ tags = Map(
+ "trace" → "trace-with-segments",
+ "category" → "test-category",
+ "library" → "test-library"
+ ))
+
+ snapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
+ snapshot.counter("errors").get.count should be(1)
+ }
+ }
+} \ No newline at end of file
diff --git a/kamon-core/src/legacy-test/scala/kamon/metric/instrument/CounterSpec.scala b/kamon-core/src/legacy-test/scala/kamon/metric/instrument/CounterSpec.scala
new file mode 100644
index 00000000..d5d651e5
--- /dev/null
+++ b/kamon-core/src/legacy-test/scala/kamon/metric/instrument/CounterSpec.scala
@@ -0,0 +1,79 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric.instrument
+
+import java.nio.LongBuffer
+
+import org.scalatest.{Matchers, WordSpec}
+
+class CounterSpec extends WordSpec with Matchers {
+
+ "a Counter" should {
+ "allow increment only operations" in new CounterFixture {
+ counter.increment()
+ counter.increment(10)
+
+ intercept[UnsupportedOperationException] {
+ counter.increment(-10)
+ }
+ }
+
+ "reset to zero when a snapshot is taken" in new CounterFixture {
+ counter.increment(100)
+ takeSnapshotFrom(counter).count should be(100)
+ takeSnapshotFrom(counter).count should be(0)
+ takeSnapshotFrom(counter).count should be(0)
+
+ counter.increment(50)
+ takeSnapshotFrom(counter).count should be(50)
+ takeSnapshotFrom(counter).count should be(0)
+ }
+
+ "produce a snapshot that can be merged with others" in new CounterFixture {
+ val counterA = Counter()
+ val counterB = Counter()
+ counterA.increment(100)
+ counterB.increment(200)
+
+ val counterASnapshot = takeSnapshotFrom(counterA)
+ val counterBSnapshot = takeSnapshotFrom(counterB)
+
+ counterASnapshot.merge(counterBSnapshot, collectionContext).count should be(300)
+ counterBSnapshot.merge(counterASnapshot, collectionContext).count should be(300)
+ }
+
+ "produce a snapshot that can be scaled" in new CounterFixture {
+ counter.increment(100)
+
+ val counterSnapshot = takeSnapshotFrom(counter)
+
+ val scaledSnapshot = counterSnapshot.scale(Time.Milliseconds, Time.Microseconds)
+ scaledSnapshot.count should be(100000)
+ }
+
+ }
+
+ trait CounterFixture {
+ val counter = Counter()
+
+ val collectionContext = new CollectionContext {
+ val buffer: LongBuffer = LongBuffer.allocate(1)
+ }
+
+ def takeSnapshotFrom(counter: Counter): Counter.Snapshot = counter.collect(collectionContext)
+ }
+}
diff --git a/kamon-core/src/legacy-test/scala/kamon/metric/instrument/GaugeSpec.scala b/kamon-core/src/legacy-test/scala/kamon/metric/instrument/GaugeSpec.scala
new file mode 100644
index 00000000..ec07d66c
--- /dev/null
+++ b/kamon-core/src/legacy-test/scala/kamon/metric/instrument/GaugeSpec.scala
@@ -0,0 +1,79 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric.instrument
+
+import java.util.concurrent.atomic.AtomicLong
+import kamon.Kamon
+import kamon.metric.instrument.Histogram.DynamicRange
+import kamon.testkit.BaseKamonSpec
+import scala.concurrent.duration._
+
+class GaugeSpec extends BaseKamonSpec("gauge-spec") {
+
+ "a Gauge" should {
+ "automatically record the current value using the configured refresh-interval" in new GaugeFixture {
+ val (numberOfValuesRecorded, gauge) = createGauge()
+ Thread.sleep(1.second.toMillis)
+
+ numberOfValuesRecorded.get() should be(10L +- 1L)
+ gauge.cleanup
+ }
+
+ "stop automatically recording after a call to cleanup" in new GaugeFixture {
+ val (numberOfValuesRecorded, gauge) = createGauge()
+ Thread.sleep(1.second.toMillis)
+
+ gauge.cleanup
+ numberOfValuesRecorded.get() should be(10L +- 1L)
+ Thread.sleep(1.second.toMillis)
+
+ numberOfValuesRecorded.get() should be(10L +- 1L)
+ }
+
+ "produce a Histogram snapshot including all the recorded values" in new GaugeFixture {
+ val (numberOfValuesRecorded, gauge) = createGauge()
+
+ Thread.sleep(1.second.toMillis)
+ gauge.cleanup
+ val snapshot = gauge.collect(Kamon.metrics.buildDefaultCollectionContext)
+
+ snapshot.numberOfMeasurements should be(10L +- 1L)
+ snapshot.min should be(1)
+ snapshot.max should be(10L +- 1L)
+ }
+
+ "not record the current value when doing a collection" in new GaugeFixture {
+ val (numberOfValuesRecorded, gauge) = createGauge(10 seconds)
+
+ val snapshot = gauge.collect(Kamon.metrics.buildDefaultCollectionContext)
+ snapshot.numberOfMeasurements should be(0)
+ numberOfValuesRecorded.get() should be(0)
+ }
+ }
+
+ trait GaugeFixture {
+ def createGauge(refreshInterval: FiniteDuration = 100 millis): (AtomicLong, Gauge) = {
+ val recordedValuesCounter = new AtomicLong(0)
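+ // The value collector below increments recordedValuesCounter on every refresh, so its value is
+ // both the measurement recorded and a count of the automatic recordings performed by the gauge.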
+ val gauge = Gauge(DynamicRange(1, 100, 2), refreshInterval, Kamon.metrics.settings.refreshScheduler, {
+ () ⇒ recordedValuesCounter.addAndGet(1)
+ })
+
+ (recordedValuesCounter, gauge)
+ }
+
+ }
+}
diff --git a/kamon-core/src/legacy-test/scala/kamon/metric/instrument/HistogramSpec.scala b/kamon-core/src/legacy-test/scala/kamon/metric/instrument/HistogramSpec.scala
new file mode 100644
index 00000000..9551c6ea
--- /dev/null
+++ b/kamon-core/src/legacy-test/scala/kamon/metric/instrument/HistogramSpec.scala
@@ -0,0 +1,157 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric.instrument
+
+import java.nio.LongBuffer
+
+import kamon.metric.instrument.Histogram.DynamicRange
+import org.scalatest.{Matchers, WordSpec}
+
+import scala.util.Random
+
+class HistogramSpec extends WordSpec with Matchers {
+
+ "a Histogram" should {
+ "allow record values within the configured range" in new HistogramFixture {
+ histogram.record(1000)
+ histogram.record(5000, count = 100)
+ histogram.record(10000)
+ }
+
+ "not fail when recording values higher than the highest trackable value" in new HistogramFixture {
+ histogram.record(Long.MaxValue)
+ }
+
+ "reset all recorded levels to zero after a snapshot collection" in new HistogramFixture {
+ histogram.record(100)
+ histogram.record(200)
+ histogram.record(300)
+
+ takeSnapshot().numberOfMeasurements should be(3)
+ takeSnapshot().numberOfMeasurements should be(0)
+ }
+
+ "produce a snapshot" which {
+ "supports min, max, percentile, sum, numberOfMeasurements and recordsIterator operations" in new HistogramFixture {
+ histogram.record(100)
+ histogram.record(200, count = 200)
+ histogram.record(300)
+ histogram.record(900)
+
+ val snapshot = takeSnapshot()
+
+ snapshot.min should equal(100L +- 1L)
+ snapshot.max should equal(900L +- 9L)
+ snapshot.percentile(50.0D) should be(200)
+ snapshot.percentile(99.5D) should be(300)
+ snapshot.percentile(99.9D) should be(900)
+ snapshot.sum should be(41300)
+ snapshot.numberOfMeasurements should be(203)
+
+ val records = snapshot.recordsIterator.map(r ⇒ r.level → r.count).toSeq
+ records.size should be(4)
+ records(0) should be(100 → 1)
+ records(1) should be(200 → 200)
+ records(2) should be(300 → 1)
+ records(3) should be(900 → 1)
+ }
+
+ "can be scaled" in new HistogramFixture {
+ histogram.record(100)
+ histogram.record(200, count = 200)
+ histogram.record(300)
+ histogram.record(900)
+
+ val snapshot = takeSnapshot().scale(Time.Seconds, Time.Milliseconds)
+
+ snapshot.min should equal(100000L +- 1000L)
+ snapshot.max should equal(900000L +- 9000L)
+ snapshot.percentile(50.0D) should be(200000)
+ snapshot.percentile(99.5D) should be(300000)
+ snapshot.percentile(99.9D) should be(900000)
+ snapshot.sum should be(41300000)
+ snapshot.numberOfMeasurements should be(203)
+
+ val records = snapshot.recordsIterator.map(r ⇒ r.level → r.count).toSeq
+ records.size should be(4)
+ records(0) should be(100000 → 1)
+ records(1) should be(200000 → 200)
+ records(2) should be(300000 → 1)
+ records(3) should be(900000 → 1)
+ }
+
+ "can be merged with another snapshot" in new MultipleHistogramFixture {
+ val random = new Random(System.nanoTime())
+
+ for (repetitions ← 1 to 1000) {
+ // Put some values on A and Control
+ for (_ ← 1 to 1000) {
+ val newRecording = random.nextInt(100000)
+ controlHistogram.record(newRecording)
+ histogramA.record(newRecording)
+ }
+
+ // Put some values on B and Control
+ for (_ ← 1 to 2000) {
+ val newRecording = random.nextInt(100000)
+ controlHistogram.record(newRecording)
+ histogramB.record(newRecording)
+ }
+
+ val controlSnapshot = takeSnapshotFrom(controlHistogram)
+ val histogramASnapshot = takeSnapshotFrom(histogramA)
+ val histogramBSnapshot = takeSnapshotFrom(histogramB)
+
+ assertEquals(controlSnapshot, histogramASnapshot.merge(histogramBSnapshot, collectionContext))
+ assertEquals(controlSnapshot, histogramBSnapshot.merge(histogramASnapshot, collectionContext))
+ }
+ }
+ }
+ }
+
+ trait HistogramFixture {
+ val collectionContext = new CollectionContext {
+ val buffer: LongBuffer = LongBuffer.allocate(10000)
+ }
+
+ val histogram = Histogram(DynamicRange(1, 100000, 2))
+
+ def takeSnapshot(): Histogram.Snapshot = histogram.collect(collectionContext)
+ }
+
+ trait MultipleHistogramFixture {
+ val collectionContext = new CollectionContext {
+ val buffer: LongBuffer = LongBuffer.allocate(10000)
+ }
+
+ val controlHistogram = Histogram(DynamicRange(1, 100000, 2))
+ val histogramA = Histogram(DynamicRange(1, 100000, 2))
+ val histogramB = Histogram(DynamicRange(1, 100000, 2))
+
+ def takeSnapshotFrom(histogram: Histogram): InstrumentSnapshot = histogram.collect(collectionContext)
+
+ def assertEquals(left: InstrumentSnapshot, right: InstrumentSnapshot): Unit = {
+ val leftSnapshot = left.asInstanceOf[Histogram.Snapshot]
+ val rightSnapshot = right.asInstanceOf[Histogram.Snapshot]
+
+ leftSnapshot.numberOfMeasurements should equal(rightSnapshot.numberOfMeasurements)
+ leftSnapshot.min should equal(rightSnapshot.min)
+ leftSnapshot.max should equal(rightSnapshot.max)
+ leftSnapshot.recordsIterator.toStream should contain theSameElementsAs (rightSnapshot.recordsIterator.toStream)
+ }
+ }
+}
diff --git a/kamon-core/src/legacy-test/scala/kamon/metric/instrument/MinMaxCounterSpec.scala b/kamon-core/src/legacy-test/scala/kamon/metric/instrument/MinMaxCounterSpec.scala
new file mode 100644
index 00000000..d007d4cd
--- /dev/null
+++ b/kamon-core/src/legacy-test/scala/kamon/metric/instrument/MinMaxCounterSpec.scala
@@ -0,0 +1,140 @@
+/* =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric.instrument
+
+import java.nio.LongBuffer
+
+import akka.actor._
+import akka.testkit.TestProbe
+import kamon.Kamon
+import kamon.metric.instrument.Histogram.{DynamicRange, MutableRecord}
+import kamon.testkit.BaseKamonSpec
+import scala.concurrent.duration._
+
+class MinMaxCounterSpec extends BaseKamonSpec("min-max-counter-spec") {
+
+ "the MinMaxCounter" should {
+ "track ascending tendencies" in new MinMaxCounterFixture {
+ mmCounter.increment()
+ mmCounter.increment(3)
+ mmCounter.increment()
+
+ val snapshot = collectCounterSnapshot()
+
+ snapshot.min should be(0)
+ snapshot.max should be(5)
+ snapshot.recordsIterator.toStream should contain allOf (
+ MutableRecord(0, 1), // min
+ MutableRecord(5, 2)
+ ) // max and current
+ }
+
+ "track descending tendencies" in new MinMaxCounterFixture {
+ mmCounter.increment(5)
+ mmCounter.decrement()
+ mmCounter.decrement(3)
+ mmCounter.decrement()
+
+ val snapshot = collectCounterSnapshot()
+
+ snapshot.min should be(0)
+ snapshot.max should be(5)
+ snapshot.recordsIterator.toStream should contain allOf (
+ MutableRecord(0, 2), // min and current
+ MutableRecord(5, 1)
+ ) // max
+ }
+
+ "reset the min and max to the current value after taking a snapshot" in new MinMaxCounterFixture {
+ mmCounter.increment(5)
+ mmCounter.decrement(3)
+
+ val firstSnapshot = collectCounterSnapshot()
+
+ firstSnapshot.min should be(0)
+ firstSnapshot.max should be(5)
+ firstSnapshot.recordsIterator.toStream should contain allOf (
+ MutableRecord(0, 1), // min
+ MutableRecord(2, 1), // current
+ MutableRecord(5, 1)
+ ) // max
+
+ val secondSnapshot = collectCounterSnapshot()
+
+ secondSnapshot.min should be(2)
+ secondSnapshot.max should be(2)
+ secondSnapshot.recordsIterator.toStream should contain(
+ MutableRecord(2, 3)
+ ) // min, max and current
+ }
+
+ "report zero as the min and current values if the current value fell bellow zero" in new MinMaxCounterFixture {
+ mmCounter.decrement(3)
+
+ val snapshot = collectCounterSnapshot()
+
+ snapshot.min should be(0)
+ snapshot.max should be(0)
+ snapshot.recordsIterator.toStream should contain(
+ MutableRecord(0, 3)
+ ) // min, max and current (even though current is really -3)
+ }
+
+ "never record values bellow zero in very busy situations" in new MinMaxCounterFixture {
+ val monitor = TestProbe()
+ val workers = for (_ ← 1 to 50) yield {
+ system.actorOf(Props(new MinMaxCounterUpdateActor(mmCounter, monitor.ref)))
+ }
+
+ workers foreach (_ ! "increment")
+ for (refresh ← 1 to 1000) {
+ collectCounterSnapshot()
+ Thread.sleep(1)
+ }
+
+ monitor.expectNoMsg()
+ workers foreach (_ ! PoisonPill)
+ }
+ }
+
+ trait MinMaxCounterFixture {
+ val collectionContext = new CollectionContext {
+ val buffer: LongBuffer = LongBuffer.allocate(64)
+ }
+
+ val mmCounter = MinMaxCounter(DynamicRange(1, 1000, 2), 1 hour, Kamon.metrics.settings.refreshScheduler)
+ mmCounter.cleanup // cancel the refresh schedule
+
+ def collectCounterSnapshot(): Histogram.Snapshot = mmCounter.collect(collectionContext)
+ }
+}
+
+class MinMaxCounterUpdateActor(mmc: MinMaxCounter, monitor: ActorRef) extends Actor {
+ val x = Array.ofDim[Int](4)
+ def receive = {
+ case "increment" ⇒
+ mmc.increment()
+ self ! "decrement"
+ case "decrement" ⇒
+ mmc.decrement()
+ self ! "increment"
+ try {
+ mmc.refreshValues()
+ } catch {
+ case _: IndexOutOfBoundsException ⇒ monitor ! "failed"
+ }
+ }
+} \ No newline at end of file
diff --git a/kamon-core/src/legacy-test/scala/kamon/metric/instrument/UnitOfMeasurementSpec.scala b/kamon-core/src/legacy-test/scala/kamon/metric/instrument/UnitOfMeasurementSpec.scala
new file mode 100644
index 00000000..7133579e
--- /dev/null
+++ b/kamon-core/src/legacy-test/scala/kamon/metric/instrument/UnitOfMeasurementSpec.scala
@@ -0,0 +1,98 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric.instrument
+
+import kamon.metric.instrument.UnitOfMeasurement.Unknown
+import org.scalatest.{Matchers, WordSpec}
+
+class UnitOfMeasurementSpec extends WordSpec with Matchers {
+
+ "Time unit" should {
+ "resolve Time Unit by valid name" in {
+ Time("s") should be(Time.Seconds)
+ Time("n") should be(Time.Nanoseconds)
+ Time("ms") should be(Time.Milliseconds)
+ Time("µs") should be(Time.Microseconds)
+ }
+ "fail to resolve Time Unit by invalid name" in {
+ val ex = intercept[IllegalArgumentException](Time("boo"))
+ ex.getMessage should be("Can't recognize time unit 'boo'")
+ }
+ "scale time properly" in {
+ val epsilon = 0.0001
+
+ Time.Nanoseconds.scale(Time.Nanoseconds)(1000000D) should be(1000000D +- epsilon)
+ Time.Nanoseconds.scale(Time.Microseconds)(1000000D) should be(1000D +- epsilon)
+ Time.Nanoseconds.scale(Time.Milliseconds)(1000000D) should be(1D +- epsilon)
+ Time.Nanoseconds.scale(Time.Seconds)(1000000D) should be(0.001D +- epsilon)
+ Time.Seconds.scale(Time.Nanoseconds)(1D) should be(1000000000D +- epsilon)
+ }
+ "allow scale only time" in {
+ intercept[IllegalArgumentException](Time.Nanoseconds.tryScale(Unknown)(100))
+ .getMessage should be("Can't scale different types of units `time` and `unknown`")
+ intercept[IllegalArgumentException](Time.Nanoseconds.tryScale(Memory.Bytes)(100))
+ .getMessage should be("Can't scale different types of units `time` and `bytes`")
+ val epsilon = 0.0001
+
+ Time.Nanoseconds.tryScale(Time.Nanoseconds)(100D) should be(100D +- epsilon)
+ }
+ }
+
+ "Memory unit" should {
+ "resolve Memory Unit by valid name" in {
+ Memory("b") should be(Memory.Bytes)
+ Memory("Kb") should be(Memory.KiloBytes)
+ Memory("Mb") should be(Memory.MegaBytes)
+ Memory("Gb") should be(Memory.GigaBytes)
+ }
+ "fail to resolve Memory Unit by invalid name" in {
+ val ex = intercept[IllegalArgumentException](Memory("boo"))
+ ex.getMessage should be("Can't recognize memory unit 'boo'")
+ }
+ "scale memory properly" in {
+ val epsilon = 0.0001
+
+ Memory.Bytes.scale(Memory.Bytes)(1000000D) should be(1000000D +- epsilon)
+ Memory.Bytes.scale(Memory.KiloBytes)(1000000D) should be(976.5625D +- epsilon)
+ Memory.Bytes.scale(Memory.MegaBytes)(1000000D) should be(0.9536D +- epsilon)
+ Memory.Bytes.scale(Memory.GigaBytes)(1000000D) should be(9.3132E-4D +- epsilon)
+ Memory.MegaBytes.scale(Memory.Bytes)(1D) should be(1048576D +- epsilon)
+ }
+ "allow scale only memory" in {
+ intercept[IllegalArgumentException](Memory.Bytes.tryScale(Unknown)(100))
+ .getMessage should be("Can't scale different types of units `bytes` and `unknown`")
+ intercept[IllegalArgumentException](Memory.Bytes.tryScale(Time.Nanoseconds)(100))
+ .getMessage should be("Can't scale different types of units `bytes` and `time`")
+ val epsilon = 0.0001
+
+ Memory.Bytes.tryScale(Memory.Bytes)(100D) should be(100D +- epsilon)
+ }
+
+ }
+
+ "Unknown unit" should {
+ "allow scale only Unknown" in {
+ intercept[IllegalArgumentException](Unknown.tryScale(Memory.Bytes)(100))
+ .getMessage should be("Can't scale different types of units `unknown` and `bytes`")
+ intercept[IllegalArgumentException](Unknown.tryScale(Time.Nanoseconds)(100))
+ .getMessage should be("Can't scale different types of units `unknown` and `time`")
+
+ Unknown.scale(Unknown)(100D) should be(100D)
+ }
+
+ }
+}
diff --git a/kamon-core/src/legacy-test/scala/kamon/testkit/BaseKamonSpec.scala b/kamon-core/src/legacy-test/scala/kamon/testkit/BaseKamonSpec.scala
new file mode 100644
index 00000000..995f15fa
--- /dev/null
+++ b/kamon-core/src/legacy-test/scala/kamon/testkit/BaseKamonSpec.scala
@@ -0,0 +1,70 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.testkit
+
+import akka.actor.ActorSystem
+import akka.testkit.{ImplicitSender, TestKitBase}
+import com.typesafe.config.Config
+import kamon.{ActorSystemTools, Kamon}
+import kamon.metric.{Entity, EntitySnapshot, SubscriptionsDispatcher}
+import kamon.trace.TraceContext
+import kamon.util.LazyActorRef
+import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
+
+abstract class BaseKamonSpec(actorSystemName: String) extends TestKitBase with WordSpecLike with Matchers with ImplicitSender with BeforeAndAfterAll {
+ lazy val collectionContext = Kamon.metrics.buildDefaultCollectionContext
+ override implicit lazy val system: ActorSystem = {
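+ // Being lazy, this starts Kamon with the merged config the first time a test touches the actor system.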
+ Kamon.start(mergedConfig)
+ ActorSystem(actorSystemName, mergedConfig)
+ }
+
+ def config: Config = Kamon.config
+
+ def mergedConfig: Config = config.withFallback(Kamon.config)
+
+ def newContext(name: String): TraceContext =
+ Kamon.tracer.newContext(name)
+
+ def newContext(name: String, token: String): TraceContext =
+ Kamon.tracer.newContext(name, Option(token))
+
+ def newContext(name: String, token: String, tags: Map[String, String]): TraceContext =
+ Kamon.tracer.newContext(name, Option(token), tags)
+
+ def takeSnapshotOf(name: String, category: String): EntitySnapshot = {
+ val recorder = Kamon.metrics.find(name, category).get
+ recorder.collect(collectionContext)
+ }
+
+ def takeSnapshotOf(name: String, category: String, tags: Map[String, String]): EntitySnapshot = {
+ val recorder = Kamon.metrics.find(Entity(name, category, tags)).get
+ recorder.collect(collectionContext)
+ }
+
+ def flushSubscriptions(): Unit = {
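+ // Same reflection trick as in KamonLifecycleSpec: grab the private _subscriptions ref
+ // and force a Tick so snapshots are dispatched right away.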
+ val subscriptionsField = Kamon.metrics.getClass.getDeclaredField("_subscriptions")
+ subscriptionsField.setAccessible(true)
+ val subscriptions = subscriptionsField.get(Kamon.metrics).asInstanceOf[LazyActorRef]
+
+ subscriptions.tell(SubscriptionsDispatcher.Tick)
+ }
+
+ override protected def afterAll(): Unit = {
+ Kamon.shutdown()
+ ActorSystemTools.terminateActorSystem(system)
+ }
+}
diff --git a/kamon-core/src/legacy-test/scala/kamon/trace/SamplerSpec.scala b/kamon-core/src/legacy-test/scala/kamon/trace/SamplerSpec.scala
new file mode 100644
index 00000000..88cdf116
--- /dev/null
+++ b/kamon-core/src/legacy-test/scala/kamon/trace/SamplerSpec.scala
@@ -0,0 +1,76 @@
+package kamon.trace
+
+import kamon.testkit.BaseKamonSpec
+import kamon.util.NanoInterval
+
+class SamplerSpec extends BaseKamonSpec("sampler-spec") {
+
+ "the Sampler" should {
+ "work as intended" when {
+ "using all mode" in {
+ val sampler = SampleAll
+
+ sampler.shouldTrace should be(true)
+
+ sampler.shouldReport(NanoInterval.default) should be(true)
+ }
+
+ "using random mode" in {
+ val sampler = new RandomSampler(100)
+
+ sampler.shouldTrace should be(true)
+ sampler.shouldTrace should be(true)
+
+ sampler.shouldReport(NanoInterval.default) should be(true)
+ }
+
+ "using ordered mode" in {
+ var sampler = new OrderedSampler(1)
+
+ sampler.shouldTrace should be(true)
+ sampler.shouldTrace should be(true)
+ sampler.shouldTrace should be(true)
+ sampler.shouldTrace should be(true)
+ sampler.shouldTrace should be(true)
+ sampler.shouldTrace should be(true)
+
+ sampler = new OrderedSampler(2)
+
+ sampler.shouldTrace should be(false)
+ sampler.shouldTrace should be(true)
+ sampler.shouldTrace should be(false)
+ sampler.shouldTrace should be(true)
+ sampler.shouldTrace should be(false)
+ sampler.shouldTrace should be(true)
+
+ sampler.shouldReport(NanoInterval.default) should be(true)
+ }
+
+ "using threshold mode" in {
+ val sampler = new ThresholdSampler(new NanoInterval(10000000L))
+
+ sampler.shouldTrace should be(true)
+
+ sampler.shouldReport(new NanoInterval(5000000L)) should be(false)
+ sampler.shouldReport(new NanoInterval(10000000L)) should be(true)
+ sampler.shouldReport(new NanoInterval(15000000L)) should be(true)
+ sampler.shouldReport(new NanoInterval(0L)) should be(false)
+ }
+
+ "using clock mode" in {
+ val sampler = new ClockSampler(new NanoInterval(10000000L))
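+        // A new trace should only be sampled again once the configured 10ms pause has elapsed since the last sampled one.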
+
+ sampler.shouldTrace should be(true)
+ sampler.shouldTrace should be(false)
+ Thread.sleep(1L)
+ sampler.shouldTrace should be(false)
+ Thread.sleep(10L)
+ sampler.shouldTrace should be(true)
+ sampler.shouldTrace should be(false)
+
+ sampler.shouldReport(NanoInterval.default) should be(true)
+ }
+ }
+ }
+
+}
diff --git a/kamon-core/src/legacy-test/scala/kamon/trace/SimpleTraceSpec.scala b/kamon-core/src/legacy-test/scala/kamon/trace/SimpleTraceSpec.scala
new file mode 100644
index 00000000..fe7ad053
--- /dev/null
+++ b/kamon-core/src/legacy-test/scala/kamon/trace/SimpleTraceSpec.scala
@@ -0,0 +1,167 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2016 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.trace
+
+import kamon.Kamon
+import kamon.testkit.BaseKamonSpec
+
+import scala.concurrent.duration._
+import scala.util.control.NoStackTrace
+
+class SimpleTraceSpec extends BaseKamonSpec("simple-trace-spec") {
+
+ "the simple tracing" should {
+ "send a TraceInfo when the trace has finished and all segments are finished" in {
+ Kamon.tracer.subscribe(testActor)
+
+ Tracer.withContext(newContext("simple-trace-without-segments")) {
+ Tracer.currentContext.startSegment("segment-one", "test-segment", "test").finish()
+ Tracer.currentContext.startSegment("segment-two", "test-segment", "test").finish()
+ Tracer.currentContext.finish()
+ }
+
+ val traceInfo = expectMsgType[TraceInfo]
+ Kamon.tracer.unsubscribe(testActor)
+
+ traceInfo.name should be("simple-trace-without-segments")
+ traceInfo.segments.size should be(2)
+ traceInfo.segments.find(_.name == "segment-one") should be('defined)
+ traceInfo.segments.find(_.name == "segment-two") should be('defined)
+ }
+
+ "send a TraceInfo when the trace has finished with error and all segments are finished" in {
+ Kamon.tracer.subscribe(testActor)
+
+ Tracer.withContext(newContext("simple-trace-with-error-and-without-segments")) {
+ Tracer.currentContext.startSegment("segment-one", "test-segment", "test").finish()
+ Tracer.currentContext.startSegment("segment-two", "test-segment", "test").finish()
+ Tracer.currentContext.finishWithError(TraceException("awesome-trace-error"))
+ }
+
+ val traceInfo = expectMsgType[TraceInfo]
+ Kamon.tracer.unsubscribe(testActor)
+
+ traceInfo.name should be("simple-trace-with-error-and-without-segments")
+ traceInfo.status should be(Status.FinishedWithError)
+ traceInfo.segments.size should be(2)
+ traceInfo.segments.find(_.name == "segment-one") should be('defined)
+ traceInfo.segments.find(_.name == "segment-two") should be('defined)
+ }
+
+ "send a TraceInfo when the trace has finished with error and all segments are finished with error" in {
+ Kamon.tracer.subscribe(testActor)
+
+ Tracer.withContext(newContext("simple-trace-with-error-and-without-segments")) {
+ Tracer.currentContext.startSegment("segment-one-finished-with-error", "test-segment", "test").finishWithError(SegmentException("awesome-segment-exception"))
+ Tracer.currentContext.startSegment("segment-two-finished-with-error", "test-segment", "test").finishWithError(SegmentException("awesome-segment-exception"))
+ Tracer.currentContext.finishWithError(TraceException("awesome-trace-error"))
+ }
+
+ val traceInfo = expectMsgType[TraceInfo]
+ Kamon.tracer.unsubscribe(testActor)
+
+ traceInfo.name should be("simple-trace-with-error-and-without-segments")
+ traceInfo.status should be(Status.FinishedWithError)
+ traceInfo.segments.size should be(2)
+
+ val segmentOne = traceInfo.segments.find(_.name == "segment-one-finished-with-error")
+ val segmentTwo = traceInfo.segments.find(_.name == "segment-two-finished-with-error")
+
+ segmentOne.get.status should be(Status.FinishedWithError)
+ segmentTwo.get.status should be(Status.FinishedWithError)
+ }
+
+    "send a TraceInfo when the trace has finished and all segments are finished and both contain tags" in {
+ Kamon.tracer.subscribe(testActor)
+
+ Tracer.withContext(newContext("simple-trace-without-segments", "awesome-token", Map("environment" → "production"))) {
+ Tracer.currentContext.startSegment("segment-one", "test-segment", "test", Map("segment-one-info" → "info")).finish()
+ Tracer.currentContext.startSegment("segment-two", "test-segment", "test", Map("segment-two-info" → "info")).finish()
+ Tracer.currentContext.finish()
+ }
+
+ val traceInfo = expectMsgType[TraceInfo]
+ Kamon.tracer.unsubscribe(testActor)
+
+ traceInfo.name should be("simple-trace-without-segments")
+ traceInfo.tags should be(Map("environment" → "production"))
+ traceInfo.segments.size should be(2)
+
+ val segmentOne = traceInfo.segments.find(_.name == "segment-one")
+ val segmentTwo = traceInfo.segments.find(_.name == "segment-two")
+
+ segmentOne.get.tags should be(Map("segment-one-info" → "info"))
+ segmentTwo.get.tags should be(Map("segment-two-info" → "info"))
+ }
+
+    "send a TraceInfo when the trace has finished and all segments are finished and tags were added afterwards" in {
+ Kamon.tracer.subscribe(testActor)
+
+ Tracer.withContext(newContext("simple-trace-without-segments", "awesome-token")) {
+ Tracer.currentContext.addTag("environment", "production")
+ val segmentOne = Tracer.currentContext.startSegment("segment-one", "test-segment", "test")
+ segmentOne.addTag("segment-one-info", "info")
+ segmentOne.finish()
+ val segmentTwo = Tracer.currentContext.startSegment("segment-two", "test-segment", "test")
+ segmentTwo.addTag("segment-two-info", "info")
+ segmentTwo.finish()
+ Tracer.currentContext.finish()
+ }
+
+ val traceInfo = expectMsgType[TraceInfo]
+ Kamon.tracer.unsubscribe(testActor)
+
+ traceInfo.name should be("simple-trace-without-segments")
+ traceInfo.tags should be(Map("environment" → "production"))
+ traceInfo.segments.size should be(2)
+
+ val segmentOne = traceInfo.segments.find(_.name == "segment-one")
+ val segmentTwo = traceInfo.segments.find(_.name == "segment-two")
+
+ segmentOne.get.tags should be(Map("segment-one-info" → "info"))
+ segmentTwo.get.tags should be(Map("segment-two-info" → "info"))
+ }
+
+ "incubate the tracing context if there are open segments after finishing" in {
+ Kamon.tracer.subscribe(testActor)
+
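+      // Finishing the context while "segment-two" is still open should incubate it: the TraceInfo is only
+      // dispatched once the remaining segment finishes.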
+ val secondSegment = Tracer.withContext(newContext("simple-trace-without-segments")) {
+ Tracer.currentContext.startSegment("segment-one", "test-segment", "test").finish()
+ val segment = Tracer.currentContext.startSegment("segment-two", "test-segment", "test")
+ Tracer.currentContext.finish()
+ segment
+ }
+
+ expectNoMsg(2 seconds)
+ secondSegment.finish()
+
+ within(10 seconds) {
+ val traceInfo = expectMsgType[TraceInfo]
+ Kamon.tracer.unsubscribe(testActor)
+
+ traceInfo.name should be("simple-trace-without-segments")
+ traceInfo.segments.size should be(2)
+ traceInfo.segments.find(_.name == "segment-one") should be('defined)
+ traceInfo.segments.find(_.name == "segment-two") should be('defined)
+ }
+ }
+
+ }
+}
+
+case class TraceException(message: String) extends RuntimeException(message) with NoStackTrace
+case class SegmentException(message: String) extends RuntimeException(message) with NoStackTrace \ No newline at end of file
diff --git a/kamon-core/src/legacy-test/scala/kamon/trace/TraceContextManipulationSpec.scala b/kamon-core/src/legacy-test/scala/kamon/trace/TraceContextManipulationSpec.scala
new file mode 100644
index 00000000..d817186a
--- /dev/null
+++ b/kamon-core/src/legacy-test/scala/kamon/trace/TraceContextManipulationSpec.scala
@@ -0,0 +1,113 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.trace
+
+import kamon.testkit.BaseKamonSpec
+
+class TraceContextManipulationSpec extends BaseKamonSpec("trace-context-manipulation-spec") {
+
+ "the TraceContext api" should {
+ "allow starting a trace within a specified block of code, and only within that block of code" in {
+ val createdContext = Tracer.withContext(newContext("start-context")) {
+ Tracer.currentContext should not be empty
+ Tracer.currentContext
+ }
+
+ Tracer.currentContext shouldBe empty
+ createdContext.name shouldBe ("start-context")
+ }
+
+ "allow starting a trace within a specified block of code, providing a trace-token and only within that block of code" in {
+ val createdContext = Tracer.withContext(newContext("start-context-with-token", "token-1")) {
+ Tracer.currentContext should not be empty
+ Tracer.currentContext
+ }
+
+ Tracer.currentContext shouldBe empty
+ createdContext.name shouldBe ("start-context-with-token")
+ createdContext.token should be("token-1")
+ }
+
+ "allow providing a TraceContext and make it available within a block of code" in {
+ val createdContext = newContext("manually-provided-trace-context")
+
+ Tracer.currentContext shouldBe empty
+ Tracer.withContext(createdContext) {
+ Tracer.currentContext should be(createdContext)
+ }
+
+ Tracer.currentContext shouldBe empty
+ }
+
+ "allow renaming a trace" in {
+ val createdContext = Tracer.withContext(newContext("trace-before-rename")) {
+ Tracer.currentContext.rename("renamed-trace")
+ Tracer.currentContext
+ }
+
+ Tracer.currentContext shouldBe empty
+ createdContext.name shouldBe "renamed-trace"
+ }
+
+ "allow tagging and untagging a trace" in {
+      val createdContext = Tracer.withContext(newContext("trace-before-tagging")) {
+ Tracer.currentContext.addTag("trace-tag", "tag-1")
+ Tracer.currentContext
+ }
+
+ Tracer.currentContext shouldBe empty
+ createdContext.tags shouldBe Map("trace-tag" → "tag-1")
+
+ createdContext.removeTag("trace-tag", "tag-1")
+
+ createdContext.tags shouldBe Map.empty
+ }
+
+ "allow creating a segment within a trace" in {
+ val createdContext = Tracer.withContext(newContext("trace-with-segments")) {
+ Tracer.currentContext.startSegment("segment-1", "segment-1-category", "segment-library")
+ Tracer.currentContext
+ }
+
+ Tracer.currentContext shouldBe empty
+ createdContext.name shouldBe "trace-with-segments"
+ }
+
+ "allow renaming a segment" in {
+ Tracer.withContext(newContext("trace-with-renamed-segment")) {
+ val segment = Tracer.currentContext.startSegment("original-segment-name", "segment-label", "segment-library")
+ segment.name should be("original-segment-name")
+
+ segment.rename("new-segment-name")
+ segment.name should be("new-segment-name")
+ }
+ }
+
+ "allow tagging and untagging a segment" in {
+      Tracer.withContext(newContext("trace-with-tagged-segment")) {
+ val segment = Tracer.currentContext.startSegment("segment-name", "segment-label", "segment-library")
+ segment.tags should be(Map.empty)
+
+ segment.addTag("segment-tag", "tag-1")
+ segment.tags should be(Map("segment-tag" → "tag-1"))
+
+ segment.removeTag("segment-tag", "tag-1")
+ segment.tags should be(Map.empty)
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/kamon-core/src/legacy-test/scala/kamon/trace/TraceLocalSpec.scala b/kamon-core/src/legacy-test/scala/kamon/trace/TraceLocalSpec.scala
new file mode 100644
index 00000000..41d5bc83
--- /dev/null
+++ b/kamon-core/src/legacy-test/scala/kamon/trace/TraceLocalSpec.scala
@@ -0,0 +1,108 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.trace
+
+import kamon.testkit.BaseKamonSpec
+import kamon.trace.TraceLocal.AvailableToMdc
+import kamon.trace.logging.MdcKeysSupport
+import kamon.util.Supplier
+import org.scalatest.concurrent.PatienceConfiguration
+import org.scalatest.OptionValues
+import org.slf4j.MDC
+
+class TraceLocalSpec extends BaseKamonSpec("trace-local-spec") with PatienceConfiguration with OptionValues with MdcKeysSupport {
+ val SampleTraceLocalKeyAvailableToMDC = AvailableToMdc("someKey")
+
+ object SampleTraceLocalKey extends TraceLocal.TraceLocalKey[String]
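+  // AvailableToMdc keys are additionally exposed through the SLF4J MDC (under the given key name) when wrapped
+  // in withMdc, as exercised by the last two tests below; plain TraceLocalKey entries are read via TraceLocal itself.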
+
+ "the TraceLocal storage" should {
+ "allow storing and retrieving values" in {
+ Tracer.withContext(newContext("store-and-retrieve-trace-local")) {
+ val testString = "Hello World"
+
+ TraceLocal.store(SampleTraceLocalKey)(testString)
+ TraceLocal.retrieve(SampleTraceLocalKey).value should equal(testString)
+ }
+ }
+
+    "return None when retrieving a non-existent key" in {
+ Tracer.withContext(newContext("non-existent-key")) {
+ TraceLocal.retrieve(SampleTraceLocalKey) should equal(None)
+ }
+ }
+
+    "throw an exception when trying to get a non-existent key" in {
+ Tracer.withContext(newContext("non-existent-key")) {
+ intercept[NoSuchElementException] {
+ TraceLocal.get(SampleTraceLocalKey)
+ }
+ }
+ }
+
+    "return the supplied default value when retrieving a non-existent key" in {
+ Tracer.withContext(newContext("non-existent-key")) {
+ TraceLocal.getOrElse(SampleTraceLocalKey, new Supplier[String] { def get = "optionalValue" }) should equal("optionalValue")
+ }
+ }
+
+ "return None when retrieving a key without a current TraceContext" in {
+ TraceLocal.retrieve(SampleTraceLocalKey) should equal(None)
+ }
+
+ "be attached to the TraceContext when it is propagated" in {
+ val testString = "Hello World"
+ val testContext = Tracer.withContext(newContext("manually-propagated-trace-local")) {
+ TraceLocal.store(SampleTraceLocalKey)(testString)
+ TraceLocal.retrieve(SampleTraceLocalKey).value should equal(testString)
+ Tracer.currentContext
+ }
+
+      // No TraceLocal should be available here
+ TraceLocal.retrieve(SampleTraceLocalKey) should equal(None)
+
+ Tracer.withContext(testContext) {
+ TraceLocal.retrieve(SampleTraceLocalKey).value should equal(testString)
+ }
+ }
+
+    "allow retrieving a value from the MDC when the key was created with AvailableToMdc(someKey)" in {
+ Tracer.withContext(newContext("store-and-retrieve-trace-local-and-copy-to-mdc")) {
+ val testString = "Hello MDC"
+
+ TraceLocal.store(SampleTraceLocalKeyAvailableToMDC)(testString)
+ TraceLocal.retrieve(SampleTraceLocalKeyAvailableToMDC).value should equal(testString)
+
+ withMdc {
+ MDC.get("someKey") should equal(testString)
+ }
+ }
+ }
+
+    "allow retrieving a value from the MDC when the key was stored with TraceLocal.storeForMdc(String, String)" in {
+ Tracer.withContext(newContext("store-and-retrieve-trace-local-and-copy-to-mdc")) {
+ val testString = "Hello MDC"
+
+ TraceLocal.storeForMdc("someKey", testString)
+ TraceLocal.retrieve(SampleTraceLocalKeyAvailableToMDC).value should equal(testString)
+
+ withMdc {
+ MDC.get("someKey") should equal(testString)
+ }
+ }
+ }
+ }
+}
diff --git a/kamon-core/src/legacy-test/scala/kamon/trace/logging/MdcKeysSupportSpec.scala b/kamon-core/src/legacy-test/scala/kamon/trace/logging/MdcKeysSupportSpec.scala
new file mode 100644
index 00000000..39374e79
--- /dev/null
+++ b/kamon-core/src/legacy-test/scala/kamon/trace/logging/MdcKeysSupportSpec.scala
@@ -0,0 +1,44 @@
+package kamon.trace.logging
+
+import kamon.testkit.BaseKamonSpec
+import kamon.trace.{EmptyTraceContext, Tracer}
+import org.slf4j.MDC
+
+class MdcKeysSupportSpec extends BaseKamonSpec("mdc-keys-support-spec") {
+
+ "Running code with MDC support" should {
+ "add nothing to the MDC" when {
+ "the trace context is empty" in {
+ // Given an empty trace context.
+ Tracer.withContext(EmptyTraceContext) {
+ // When some code is executed with MDC support.
+ MdcKeysSupport.withMdc {
+ // Then the MDC should not contain the trace token.
+ Option(MDC.get(MdcKeysSupport.traceTokenKey)) should be(None)
+ // Or name
+ Option(MDC.get(MdcKeysSupport.traceNameKey)) should be(None)
+ }
+ }
+ }
+ }
+    "add the trace token and name to the MDC" when {
+ "the trace context is not empty" in {
+ // Given a trace context.
+ Tracer.withNewContext("name", Some("token")) {
+ // When some code is executed with MDC support.
+ MdcKeysSupport.withMdc {
+ // Then the MDC should contain the trace token.
+ Option(MDC.get(MdcKeysSupport.traceTokenKey)) should be(Some("token"))
+ // And name
+ Option(MDC.get(MdcKeysSupport.traceNameKey)) should be(Some("name"))
+ }
+
+ // Then after code is executed the MDC should have been cleared.
+ Option(MDC.get(MdcKeysSupport.traceTokenKey)) should be(None)
+ Option(MDC.get(MdcKeysSupport.traceNameKey)) should be(None)
+ }
+ }
+ }
+ }
+
+}
diff --git a/kamon-core/src/legacy-test/scala/kamon/util/GlobPathFilterSpec.scala b/kamon-core/src/legacy-test/scala/kamon/util/GlobPathFilterSpec.scala
new file mode 100644
index 00000000..7d585087
--- /dev/null
+++ b/kamon-core/src/legacy-test/scala/kamon/util/GlobPathFilterSpec.scala
@@ -0,0 +1,73 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.util
+
+import org.scalatest.{Matchers, WordSpecLike}
+
+class GlobPathFilterSpec extends WordSpecLike with Matchers {
+ "The GlobPathFilter" should {
+
+ "match a single expression" in {
+ val filter = new GlobPathFilter("/user/actor")
+
+ filter.accept("/user/actor") shouldBe true
+
+ filter.accept("/user/actor/something") shouldBe false
+ filter.accept("/user/actor/somethingElse") shouldBe false
+ }
+
+ "match all expressions in the same level" in {
+ val filter = new GlobPathFilter("/user/*")
+
+ filter.accept("/user/actor") shouldBe true
+ filter.accept("/user/otherActor") shouldBe true
+
+ filter.accept("/user/something/actor") shouldBe false
+ filter.accept("/user/something/otherActor") shouldBe false
+ }
+
+    "match all expressions regardless of the level" in {
+ val filter = new GlobPathFilter("**")
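+      // A double wildcard appears to match across path separators, unlike the single * used above.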
+
+ filter.accept("GET: /ping") shouldBe true
+ filter.accept("GET: /ping/pong") shouldBe true
+ }
+
+    "match all expressions, crossing path boundaries" in {
+ val filter = new GlobPathFilter("/user/actor-**")
+
+ filter.accept("/user/actor-") shouldBe true
+ filter.accept("/user/actor-one") shouldBe true
+ filter.accept("/user/actor-one/other") shouldBe true
+
+ filter.accept("/user/something/actor") shouldBe false
+ filter.accept("/user/something/otherActor") shouldBe false
+ }
+
+ "match exactly one character" in {
+ val filter = new GlobPathFilter("/user/actor-?")
+
+ filter.accept("/user/actor-1") shouldBe true
+ filter.accept("/user/actor-2") shouldBe true
+ filter.accept("/user/actor-3") shouldBe true
+
+ filter.accept("/user/actor-one") shouldBe false
+ filter.accept("/user/actor-two") shouldBe false
+ filter.accept("/user/actor-tree") shouldBe false
+ }
+ }
+}
diff --git a/kamon-core/src/legacy-test/scala/kamon/util/NeedToScaleSpec.scala b/kamon-core/src/legacy-test/scala/kamon/util/NeedToScaleSpec.scala
new file mode 100644
index 00000000..cba6ad98
--- /dev/null
+++ b/kamon-core/src/legacy-test/scala/kamon/util/NeedToScaleSpec.scala
@@ -0,0 +1,67 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.util
+
+import com.typesafe.config.ConfigFactory
+import kamon.metric.instrument.{Memory, Time}
+import org.scalatest.{Matchers, WordSpec}
+
+class NeedToScaleSpec extends WordSpec with Matchers {
+
+ "NeedToScale" should {
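+    // NeedToScale is used as an extractor over a Typesafe Config section: it matches when a time-units
+    // and/or memory-units key is present and yields the corresponding scaling targets.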
+ "extract time unit to scale to from config" in {
+ val config = ConfigFactory.parseString(
+ """
+ |time-units = "ms"
+ """.stripMargin
+ )
+
+ config match {
+ case NeedToScale(timeUnits, memoryUnits) ⇒
+ timeUnits should be(Some(Time.Milliseconds))
+ memoryUnits should be(None)
+ }
+ }
+ "extract memory unit to scale to from config" in {
+ val config = ConfigFactory.parseString(
+ """
+ |memory-units = "kb"
+ """.stripMargin
+ )
+
+ config match {
+ case NeedToScale(timeUnits, memoryUnits) ⇒
+ timeUnits should be(None)
+ memoryUnits should be(Some(Memory.KiloBytes))
+ }
+ }
+    "extract nothing if the config has no scaling keys" in {
+ val config = ConfigFactory.parseString(
+ """
+ |some-other-key = "value"
+ """.stripMargin
+ )
+
+ config match {
+ case NeedToScale(timeUnits, memoryUnits) ⇒
+ fail("Should not match")
+ case _ ⇒
+ }
+ }
+ }
+
+}
diff --git a/kamon-core/src/legacy-test/scala/kamon/util/RegexPathFilterSpec.scala b/kamon-core/src/legacy-test/scala/kamon/util/RegexPathFilterSpec.scala
new file mode 100644
index 00000000..a2cc8629
--- /dev/null
+++ b/kamon-core/src/legacy-test/scala/kamon/util/RegexPathFilterSpec.scala
@@ -0,0 +1,59 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.util
+
+import org.scalatest.{Matchers, WordSpecLike}
+
+class RegexPathFilterSpec extends WordSpecLike with Matchers {
+ "The RegexPathFilter" should {
+
+ "match a single expression" in {
+ val filter = new RegexPathFilter("/user/actor")
+
+ filter.accept("/user/actor") shouldBe true
+
+ filter.accept("/user/actor/something") shouldBe false
+ filter.accept("/user/actor/somethingElse") shouldBe false
+ }
+
+    "match arbitrary expressions ending with a wildcard" in {
+ val filter = new RegexPathFilter("/user/.*")
+
+ filter.accept("/user/actor") shouldBe true
+ filter.accept("/user/otherActor") shouldBe true
+ filter.accept("/user/something/actor") shouldBe true
+ filter.accept("/user/something/otherActor") shouldBe true
+
+ filter.accept("/otheruser/actor") shouldBe false
+ filter.accept("/otheruser/otherActor") shouldBe false
+ filter.accept("/otheruser/something/actor") shouldBe false
+ filter.accept("/otheruser/something/otherActor") shouldBe false
+ }
+
+ "match numbers" in {
+ val filter = new RegexPathFilter("/user/actor-\\d")
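+      // \d matches a single digit, so longer or non-numeric suffixes should be rejected below.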
+
+ filter.accept("/user/actor-1") shouldBe true
+ filter.accept("/user/actor-2") shouldBe true
+ filter.accept("/user/actor-3") shouldBe true
+
+ filter.accept("/user/actor-one") shouldBe false
+ filter.accept("/user/actor-two") shouldBe false
+ filter.accept("/user/actor-tree") shouldBe false
+ }
+ }
+}
diff --git a/kamon-core/src/legacy-test/scala/kamon/util/executors/ExecutorServiceMetricsSpec.scala b/kamon-core/src/legacy-test/scala/kamon/util/executors/ExecutorServiceMetricsSpec.scala
new file mode 100644
index 00000000..4e5394f8
--- /dev/null
+++ b/kamon-core/src/legacy-test/scala/kamon/util/executors/ExecutorServiceMetricsSpec.scala
@@ -0,0 +1,75 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2015 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.util.executors
+
+import java.util.concurrent.Executors
+
+import kamon.Kamon
+import kamon.metric.{Entity, EntityRecorder}
+import kamon.testkit.BaseKamonSpec
+
+class ExecutorServiceMetricsSpec extends BaseKamonSpec("executor-service-metrics-spec") {
+
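+  // register(...) returns the Entity under which the executor's metrics are recorded; that same Entity is
+  // later used to look up the recorder and to remove it.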
+ "the ExecutorServiceMetrics" should {
+    "register a SingleThreadPool, collect its metrics and remove it" in {
+ val singleThreadPoolExecutor = Executors.newSingleThreadExecutor()
+ val singleThreadPoolExecutorEntity = ExecutorServiceMetrics.register("single-thread-pool", singleThreadPoolExecutor)
+ findExecutorRecorder(singleThreadPoolExecutorEntity) should not be empty
+
+ ExecutorServiceMetrics.remove(singleThreadPoolExecutorEntity)
+ findExecutorRecorder(singleThreadPoolExecutorEntity) should be(empty)
+ }
+
+    "register a ThreadPoolExecutor, collect its metrics and remove it" in {
+ val threadPoolExecutor = Executors.newCachedThreadPool()
+ val threadPoolExecutorEntity = ExecutorServiceMetrics.register("thread-pool-executor", threadPoolExecutor)
+ findExecutorRecorder(threadPoolExecutorEntity) should not be empty
+
+ ExecutorServiceMetrics.remove(threadPoolExecutorEntity)
+ findExecutorRecorder(threadPoolExecutorEntity) should be(empty)
+ }
+
+    "register a ScheduledThreadPoolExecutor, collect its metrics and remove it" in {
+ val scheduledThreadPoolExecutor = Executors.newSingleThreadScheduledExecutor()
+ val scheduledThreadPoolEntity = ExecutorServiceMetrics.register("scheduled-thread-pool-executor", scheduledThreadPoolExecutor)
+ findExecutorRecorder(scheduledThreadPoolEntity) should not be empty
+
+ ExecutorServiceMetrics.remove(scheduledThreadPoolEntity)
+ findExecutorRecorder(scheduledThreadPoolEntity) should be(empty)
+ }
+
+    "register a Java ForkJoinPool, collect its metrics and remove it" in {
+ val javaForkJoinPool = Executors.newWorkStealingPool()
+ val javaForkJoinPoolEntity = ExecutorServiceMetrics.register("java-fork-join-pool", javaForkJoinPool)
+ findExecutorRecorder(javaForkJoinPoolEntity) should not be empty
+
+ ExecutorServiceMetrics.remove(javaForkJoinPoolEntity)
+ findExecutorRecorder(javaForkJoinPoolEntity) should be(empty)
+ }
+
+    "register a Scala ForkJoinPool, collect its metrics and remove it" in {
+ val scalaForkJoinPool = new scala.concurrent.forkjoin.ForkJoinPool()
+ val scalaForkJoinPoolEntity = ExecutorServiceMetrics.register("scala-fork-join-pool", scalaForkJoinPool)
+ findExecutorRecorder(scalaForkJoinPoolEntity) should not be empty
+
+ ExecutorServiceMetrics.remove(scalaForkJoinPoolEntity)
+ findExecutorRecorder(scalaForkJoinPoolEntity) should be(empty)
+ }
+
+ def findExecutorRecorder(entity: Entity): Option[EntityRecorder] = Kamon.metrics.find(entity)
+ }
+}