path: root/kamon-core-tests
author     Ivan Topolnjak <ivantopo@gmail.com>  2017-08-15 00:33:06 +0200
committer  Ivan Topolnjak <ivantopo@gmail.com>  2017-08-15 00:33:06 +0200
commit     a90d4aa75e7fdf12a85177f4e81463439bfe5bb3 (patch)
tree       2b815c06862332752ff4192c4bdceb4413cf2945 /kamon-core-tests
parent     86c72d622ac027dc96f9a744771c0a468d46dc60 (diff)
download   Kamon-a90d4aa75e7fdf12a85177f4e81463439bfe5bb3.tar.gz
           Kamon-a90d4aa75e7fdf12a85177f4e81463439bfe5bb3.tar.bz2
           Kamon-a90d4aa75e7fdf12a85177f4e81463439bfe5bb3.zip
separate the build into core, testkit and core-tests projects
Diffstat (limited to 'kamon-core-tests')
-rw-r--r--  kamon-core-tests/src/test/scala/kamon/EnvironmentSpec.scala | 48
-rw-r--r--  kamon-core-tests/src/test/scala/kamon/UtilsOnConfigSpec.scala | 36
-rw-r--r--  kamon-core-tests/src/test/scala/kamon/context/ContextCodecSpec.scala | 18
-rw-r--r--  kamon-core-tests/src/test/scala/kamon/context/ThreadLocalStorageSpec.scala | 41
-rw-r--r--  kamon-core-tests/src/test/scala/kamon/metric/FilterSpec.scala | 72
-rw-r--r--  kamon-core-tests/src/test/scala/kamon/metric/GlobPathFilterSpec.scala | 72
-rw-r--r--  kamon-core-tests/src/test/scala/kamon/metric/HistogramSpec.scala | 94
-rw-r--r--  kamon-core-tests/src/test/scala/kamon/metric/LongAdderCounterSpec.scala | 62
-rw-r--r--  kamon-core-tests/src/test/scala/kamon/metric/MetricLookupSpec.scala | 62
-rw-r--r--  kamon-core-tests/src/test/scala/kamon/metric/MinMaxCounterSpec.scala | 90
-rw-r--r--  kamon-core-tests/src/test/scala/kamon/metric/RecorderRegistrySpec.scala | 58
-rw-r--r--  kamon-core-tests/src/test/scala/kamon/metric/RegexPathFilterSpec.scala | 61
-rw-r--r--  kamon-core-tests/src/test/scala/kamon/metric/TimerSpec.scala | 72
-rw-r--r--  kamon-core-tests/src/test/scala/kamon/metric/instrument/InstrumentFactorySpec.scala | 114
-rw-r--r--  kamon-core-tests/src/test/scala/kamon/trace/B3SpanCodecSpec.scala | 192
-rw-r--r--  kamon-core-tests/src/test/scala/kamon/trace/DefaultIdentityGeneratorSpec.scala | 52
-rw-r--r--  kamon-core-tests/src/test/scala/kamon/trace/DoubleLengthTraceIdentityGeneratorSpec.scala | 86
-rw-r--r--  kamon-core-tests/src/test/scala/kamon/trace/LocalSpanSpec.scala | 100
-rw-r--r--  kamon-core-tests/src/test/scala/kamon/trace/SpanMetrics.scala | 64
-rw-r--r--  kamon-core-tests/src/test/scala/kamon/trace/TracerSpec.scala | 103
20 files changed, 1497 insertions, 0 deletions
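The commit message above describes splitting the build into separate core, testkit and core-tests projects, with the test sources shown in this diff moving into kamon-core-tests. As a rough sketch only (the project directories, names and dependency wiring here are assumptions, not the contents of this commit's build definition), a three-project sbt layout of that shape could look like:

// Hypothetical sbt sketch of a core / testkit / core-tests split; names and versions are illustrative.
lazy val core = (project in file("kamon-core"))
  .settings(name := "kamon-core")

lazy val testkit = (project in file("kamon-testkit"))
  .dependsOn(core)
  .settings(name := "kamon-testkit")

lazy val coreTests = (project in file("kamon-core-tests"))
  .dependsOn(testkit)
  .settings(
    name := "kamon-core-tests",
    libraryDependencies += "org.scalatest" %% "scalatest" % "3.0.1" % Test
  )

With an arrangement like this, the specs below would compile against kamon-core via kamon-testkit, whose helpers (SpanBuilding, SpanInspector, TestSpanReporter, MetricInspection, Reconfigure) they already import.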
diff --git a/kamon-core-tests/src/test/scala/kamon/EnvironmentSpec.scala b/kamon-core-tests/src/test/scala/kamon/EnvironmentSpec.scala
new file mode 100644
index 00000000..2dee46ab
--- /dev/null
+++ b/kamon-core-tests/src/test/scala/kamon/EnvironmentSpec.scala
@@ -0,0 +1,48 @@
+package kamon
+
+import com.typesafe.config.ConfigFactory
+import org.scalatest.{Matchers, WordSpec}
+
+class EnvironmentSpec extends WordSpec with Matchers {
+ private val baseConfig = ConfigFactory.parseString(
+ """
+ |kamon.environment {
+ | service = environment-spec
+ | host = auto
+ | instance = auto
+ |}
+ """.stripMargin
+ )
+
+ "the Kamon environment" should {
+ "assign a host and instance name when they are set to 'auto'" in {
+ val env = Environment.fromConfig(baseConfig)
+
+ env.host shouldNot be("auto")
+ env.instance shouldNot be("auto")
+ env.instance shouldBe s"environment-spec@${env.host}"
+ }
+
+ "use the configured host and instance, if provided" in {
+ val customConfig = ConfigFactory.parseString(
+ """
+ |kamon.environment {
+ | host = spec-host
+ | instance = spec-instance
+ |}
+ """.stripMargin)
+
+ val env = Environment.fromConfig(customConfig.withFallback(baseConfig))
+
+ env.host should be("spec-host")
+ env.instance should be("spec-instance")
+ }
+
+ "always return the same incarnation name" in {
+ val envOne = Environment.fromConfig(baseConfig)
+ val envTwo = Environment.fromConfig(baseConfig)
+
+ envOne.incarnation shouldBe envTwo.incarnation
+ }
+ }
+}
diff --git a/kamon-core-tests/src/test/scala/kamon/UtilsOnConfigSpec.scala b/kamon-core-tests/src/test/scala/kamon/UtilsOnConfigSpec.scala
new file mode 100644
index 00000000..8b0e0790
--- /dev/null
+++ b/kamon-core-tests/src/test/scala/kamon/UtilsOnConfigSpec.scala
@@ -0,0 +1,36 @@
+package kamon
+
+import com.typesafe.config.ConfigFactory
+import org.scalatest.{Matchers, WordSpec}
+
+class UtilsOnConfigSpec extends WordSpec with Matchers {
+ val config = ConfigFactory.parseString(
+ """
+ | kamon.test {
+ | configuration-one {
+ | setting = value
+ | other-setting = other-value
+ | }
+ |
+ | "config.two" {
+ | setting = value
+ | }
+ | }
+ """.stripMargin
+ )
+
+ "the utils on config syntax" should {
+ "list all top level keys with a configuration" in {
+ config.getConfig("kamon.test").topLevelKeys should contain only("configuration-one", "config.two")
+ }
+
+ "create a map from top level keys to the inner configuration objects"in {
+ val extractedConfigurations = config.getConfig("kamon.test").configurations
+
+ extractedConfigurations.keys should contain only("configuration-one", "config.two")
+ extractedConfigurations("configuration-one").topLevelKeys should contain only("setting", "other-setting")
+ extractedConfigurations("config.two").topLevelKeys should contain only("setting")
+ }
+ }
+
+}
diff --git a/kamon-core-tests/src/test/scala/kamon/context/ContextCodecSpec.scala b/kamon-core-tests/src/test/scala/kamon/context/ContextCodecSpec.scala
new file mode 100644
index 00000000..11be85a7
--- /dev/null
+++ b/kamon-core-tests/src/test/scala/kamon/context/ContextCodecSpec.scala
@@ -0,0 +1,18 @@
+package kamon.context
+
+import kamon.Kamon
+import org.scalatest.{Matchers, WordSpec}
+
+class ContextCodecSpec extends WordSpec with Matchers {
+ "the Context Codec" when {
+ "encoding/decoding to HttpHeaders" should {
+ "encode stuff" in {
+
+
+
+ }
+ }
+ }
+
+ val ContextCodec = new Codec(Kamon.identityProvider, Kamon.config())
+}
diff --git a/kamon-core-tests/src/test/scala/kamon/context/ThreadLocalStorageSpec.scala b/kamon-core-tests/src/test/scala/kamon/context/ThreadLocalStorageSpec.scala
new file mode 100644
index 00000000..39f316ba
--- /dev/null
+++ b/kamon-core-tests/src/test/scala/kamon/context/ThreadLocalStorageSpec.scala
@@ -0,0 +1,41 @@
+package kamon.context
+
+
+import org.scalatest.{Matchers, WordSpec}
+
+class ThreadLocalStorageSpec extends WordSpec with Matchers {
+
+ "the Storage.ThreadLocal implementation of Context storage" should {
+ "return a empty context when no context has been set" in {
+ TLS.current() shouldBe Context.Empty
+ }
+
+ "return the empty value for keys that have not been set in the context" in {
+ TLS.current().get(TestKey) shouldBe 42
+ TLS.current().get(AnotherKey) shouldBe 99
+ TLS.current().get(BroadcastKey) shouldBe "i travel around"
+
+ ScopeWithKey.get(TestKey) shouldBe 43
+ ScopeWithKey.get(AnotherKey) shouldBe 99
+ ScopeWithKey.get(BroadcastKey) shouldBe "i travel around"
+ }
+
+ "allow setting a context as current and remove it when closing the Scope" in {
+ TLS.current() shouldBe Context.Empty
+
+ val scope = TLS.store(ScopeWithKey)
+ TLS.current() shouldBe theSameInstanceAs(ScopeWithKey)
+ scope.close()
+
+ TLS.current() shouldBe Context.Empty
+ }
+
+
+ }
+
+ val TLS: Storage = new Storage.ThreadLocal
+ val TestKey = Key.local("test-key", 42)
+ val AnotherKey = Key.local("another-key", 99)
+ val BroadcastKey = Key.broadcast("broadcast", "i travel around")
+ val ScopeWithKey = Context.create().withKey(TestKey, 43)
+}
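The ThreadLocalStorageSpec above exercises the store/close contract of Storage.ThreadLocal directly. A minimal usage sketch, assuming only the Context, Key and Storage calls that appear in these specs (the key name and helper object are hypothetical), pairs the two calls in try/finally so the previous context is always restored:

import kamon.context.{Context, Key, Storage}

object ContextScoping {
  private val storage: Storage = new Storage.ThreadLocal
  private val UserID = Key.local("user-id", "anonymous")   // hypothetical key; "anonymous" is its empty value

  // Installs a Context carrying the user id for the duration of `block`, then restores the previous one.
  def withUser[T](id: String)(block: => T): T = {
    val scope = storage.store(Context.create(UserID, id))
    try block finally scope.close()
  }
}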
diff --git a/kamon-core-tests/src/test/scala/kamon/metric/FilterSpec.scala b/kamon-core-tests/src/test/scala/kamon/metric/FilterSpec.scala
new file mode 100644
index 00000000..cda76dc2
--- /dev/null
+++ b/kamon-core-tests/src/test/scala/kamon/metric/FilterSpec.scala
@@ -0,0 +1,72 @@
+package kamon
+package metric
+
+import com.typesafe.config.ConfigFactory
+import org.scalatest.{Matchers, WordSpec}
+
+
+class FilterSpec extends WordSpec with Matchers {
+ val testConfig = ConfigFactory.parseString(
+ """
+ |kamon.util.filters {
+ |
+ | some-filter {
+ | includes = ["**"]
+ | excludes = ["not-me"]
+ | }
+ |
+ | only-includes {
+ | includes = ["only-me"]
+ | }
+ |
+ | only-excludes {
+ | excludes = ["not-me"]
+ | }
+ |
+ | specific-rules {
+ | includes = ["glob:/user/**", "regex:test-[0-5]"]
+ | }
+ |
+ | "filter.with.quotes" {
+ | includes = ["**"]
+ | excludes = ["not-me"]
+ | }
+ |}
+ """.stripMargin
+ )
+
+ Kamon.reconfigure(testConfig.withFallback(Kamon.config()))
+
+ "the entity filters" should {
+ "reject anything that doesn't match any configured filter" in {
+ Kamon.filter("not-a-filter", "hello") shouldBe false
+ }
+
+ "evaluate patterns for filters with includes and excludes" in {
+ Kamon.filter("some-filter", "anything") shouldBe true
+ Kamon.filter("some-filter", "some-other") shouldBe true
+ Kamon.filter("some-filter", "not-me") shouldBe false
+ }
+
+ "allow configuring includes only or excludes only for any filter" in {
+ Kamon.filter("only-includes", "only-me") shouldBe true
+ Kamon.filter("only-includes", "anything") shouldBe false
+ Kamon.filter("only-excludes", "any-other") shouldBe false
+ Kamon.filter("only-excludes", "not-me") shouldBe false
+ }
+
+ "allow to explicitly decide whether patterns are treated as Glob or Regex" in {
+ Kamon.filter("specific-rules", "/user/accepted") shouldBe true
+ Kamon.filter("specific-rules", "/other/rejected/") shouldBe false
+ Kamon.filter("specific-rules", "test-5") shouldBe true
+ Kamon.filter("specific-rules", "test-6") shouldBe false
+ }
+
+ "allow filters with quoted names" in {
+ Kamon.filter("filter.with.quotes", "anything") shouldBe true
+ Kamon.filter("filter.with.quotes", "some-other") shouldBe true
+ Kamon.filter("filter.with.quotes", "not-me") shouldBe false
+ }
+
+ }
+}
diff --git a/kamon-core-tests/src/test/scala/kamon/metric/GlobPathFilterSpec.scala b/kamon-core-tests/src/test/scala/kamon/metric/GlobPathFilterSpec.scala
new file mode 100644
index 00000000..c21b1256
--- /dev/null
+++ b/kamon-core-tests/src/test/scala/kamon/metric/GlobPathFilterSpec.scala
@@ -0,0 +1,72 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2017 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon
+package metric
+
+import kamon.util.GlobPathFilter
+import org.scalatest.{Matchers, WordSpecLike}
+
+class GlobPathFilterSpec extends WordSpecLike with Matchers {
+ "The GlobPathFilter" should {
+
+ "match a single expression" in {
+ val filter = new GlobPathFilter("/user/actor")
+
+ filter.accept("/user/actor") shouldBe true
+ filter.accept("/user/actor/something") shouldBe false
+ filter.accept("/user/actor/somethingElse") shouldBe false
+ }
+
+ "match all expressions in the same level" in {
+ val filter = new GlobPathFilter("/user/*")
+
+ filter.accept("/user/actor") shouldBe true
+ filter.accept("/user/otherActor") shouldBe true
+ filter.accept("/user/something/actor") shouldBe false
+ filter.accept("/user/something/otherActor") shouldBe false
+ }
+
+ "match any expressions when using double star alone (**)" in {
+ val filter = new GlobPathFilter("**")
+
+ filter.accept("GET: /ping") shouldBe true
+ filter.accept("GET: /ping/pong") shouldBe true
+ filter.accept("this-doesn't_look good but-passes") shouldBe true
+ }
+
+ "match all expressions and cross the path boundaries when using double star suffix (**)" in {
+ val filter = new GlobPathFilter("/user/actor-**")
+
+ filter.accept("/user/actor-") shouldBe true
+ filter.accept("/user/actor-one") shouldBe true
+ filter.accept("/user/actor-one/other") shouldBe true
+ filter.accept("/user/something/actor") shouldBe false
+ filter.accept("/user/something/otherActor") shouldBe false
+ }
+
+ "match exactly one character when using question mark (?)" in {
+ val filter = new GlobPathFilter("/user/actor-?")
+
+ filter.accept("/user/actor-1") shouldBe true
+ filter.accept("/user/actor-2") shouldBe true
+ filter.accept("/user/actor-3") shouldBe true
+ filter.accept("/user/actor-one") shouldBe false
+ filter.accept("/user/actor-two") shouldBe false
+ filter.accept("/user/actor-tree") shouldBe false
+ }
+ }
+}
diff --git a/kamon-core-tests/src/test/scala/kamon/metric/HistogramSpec.scala b/kamon-core-tests/src/test/scala/kamon/metric/HistogramSpec.scala
new file mode 100644
index 00000000..f0ea1292
--- /dev/null
+++ b/kamon-core-tests/src/test/scala/kamon/metric/HistogramSpec.scala
@@ -0,0 +1,94 @@
+package kamon.metric
+
+import kamon.Kamon
+import org.scalatest.{Matchers, WordSpec}
+import MeasurementUnit._
+
+
+class HistogramSpec extends WordSpec with Matchers {
+ import HistogramTestHelper.HistogramMetricSyntax
+
+ "a Histogram" should {
+ "record values and reset internal state when a snapshot is taken" in {
+ val histogram = Kamon.histogram("test", unit = time.nanoseconds)
+ histogram.record(100)
+ histogram.record(150, 998)
+ histogram.record(200)
+
+ val distribution = histogram.distribution()
+ distribution.min shouldBe(100)
+ distribution.max shouldBe(200)
+ distribution.count shouldBe(1000)
+ distribution.buckets.length shouldBe 3
+ distribution.buckets.map(b => (b.value, b.frequency)) should contain.allOf(
+ (100 -> 1),
+ (150 -> 998),
+ (200 -> 1)
+ )
+
+ val emptyDistribution = histogram.distribution()
+ emptyDistribution.min shouldBe(0)
+ emptyDistribution.max shouldBe(0)
+ emptyDistribution.count shouldBe(0)
+ emptyDistribution.buckets.length shouldBe 0
+ }
+
+ "accept a smallest discernible value configuration" in {
+ // The lowestDiscernibleValue gets rounded down to the closest power of 2, so here it will be 64.
+ val histogram = Kamon.histogram("test-lowest-discernible-value", unit = time.nanoseconds, dynamicRange = DynamicRange.Fine.withLowestDiscernibleValue(100))
+ histogram.record(100)
+ histogram.record(200)
+ histogram.record(300)
+ histogram.record(1000)
+ histogram.record(2000)
+ histogram.record(3000)
+
+ val distribution = histogram.distribution()
+ distribution.min shouldBe(64)
+ distribution.max shouldBe(2944)
+ distribution.count shouldBe(6)
+ distribution.buckets.length shouldBe 6
+ distribution.buckets.map(b => (b.value, b.frequency)) should contain.allOf(
+ (64 -> 1),
+ (192 -> 1),
+ (256 -> 1),
+ (960 -> 1),
+ (1984 -> 1),
+ (2944 -> 1)
+ )
+ }
+
+ "[private api] record values and optionally keep the internal state when a snapshot is taken" in {
+ val histogram = Kamon.histogram("test", unit = time.nanoseconds)
+ histogram.record(100)
+ histogram.record(150, 998)
+ histogram.record(200)
+
+ val distribution = {
+ histogram.distribution(resetState = false) // first one gets discarded
+ histogram.distribution(resetState = false)
+ }
+
+ distribution.min shouldBe(100)
+ distribution.max shouldBe(200)
+ distribution.count shouldBe(1000)
+ distribution.buckets.length shouldBe 3
+ distribution.buckets.map(b => (b.value, b.frequency)) should contain.allOf(
+ (100 -> 1),
+ (150 -> 998),
+ (200 -> 1)
+ )
+ }
+ }
+}
+
+object HistogramTestHelper {
+
+ implicit class HistogramMetricSyntax(metric: HistogramMetric) {
+ def distribution(resetState: Boolean = true): Distribution =
+ metric.refine(Map.empty[String, String]) match {
+ case h: AtomicHdrHistogram => h.snapshot(resetState).distribution
+ case h: HdrHistogram => h.snapshot(resetState).distribution
+ }
+ }
+}
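The comment in the lowest-discernible-value test above notes that the configured value of 100 is rounded down to 64, the closest power of two, which is why the recorded minimum comes back as 64. A one-line way to reproduce that rounding outside of Kamon (an illustration, not Kamon API) is:

// Illustration only: rounding a configured lowest discernible value down to a power of two.
val configured = 100L
val effective  = java.lang.Long.highestOneBit(configured)   // 64, i.e. 2^6
assert(effective == 64L)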
diff --git a/kamon-core-tests/src/test/scala/kamon/metric/LongAdderCounterSpec.scala b/kamon-core-tests/src/test/scala/kamon/metric/LongAdderCounterSpec.scala
new file mode 100644
index 00000000..4014d6df
--- /dev/null
+++ b/kamon-core-tests/src/test/scala/kamon/metric/LongAdderCounterSpec.scala
@@ -0,0 +1,62 @@
+/* =========================================================================================
+ * Copyright © 2013-2017 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+import org.scalatest.{Matchers, WordSpec}
+
+class LongAdderCounterSpec extends WordSpec with Matchers {
+
+ "a LongAdderCounter" should {
+ "allow unit and bundled increments" in {
+ val counter = buildCounter("unit-increments")
+ counter.increment()
+ counter.increment()
+ counter.increment(40)
+
+ counter.snapshot().value shouldBe 42
+ }
+
+ "warn the user and ignore attempts to decrement the counter" in {
+ val counter = buildCounter("attempt-to-decrement")
+ counter.increment(100)
+ counter.increment(100)
+ counter.increment(100)
+
+ counter.snapshot().value shouldBe 300
+ }
+
+ "reset the internal state to zero after taking snapshots as a default behavior" in {
+ val counter = buildCounter("reset-after-snapshot")
+ counter.increment()
+ counter.increment(10)
+
+ counter.snapshot().value shouldBe 11
+ counter.snapshot().value shouldBe 0
+ }
+
+ "optionally leave the internal state unchanged" in {
+ val counter = buildCounter("reset-after-snapshot")
+ counter.increment()
+ counter.increment(10)
+
+ counter.snapshot(resetState = false).value shouldBe 11
+ counter.snapshot(resetState = false).value shouldBe 11
+ }
+ }
+
+ def buildCounter(name: String, tags: Map[String, String] = Map.empty, unit: MeasurementUnit = MeasurementUnit.none): LongAdderCounter =
+ new LongAdderCounter(name, tags, unit)
+}
diff --git a/kamon-core-tests/src/test/scala/kamon/metric/MetricLookupSpec.scala b/kamon-core-tests/src/test/scala/kamon/metric/MetricLookupSpec.scala
new file mode 100644
index 00000000..1d60a28f
--- /dev/null
+++ b/kamon-core-tests/src/test/scala/kamon/metric/MetricLookupSpec.scala
@@ -0,0 +1,62 @@
+package kamon.metric
+
+import kamon.Kamon
+import org.scalatest.{Matchers, WordSpec}
+
+class MetricLookupSpec extends WordSpec with Matchers {
+
+ "the Kamon companion object" can {
+ "lookup a metric and" should {
+ "always return the same histogram metric" in {
+ val histogramOne = Kamon.histogram("histogram-lookup")
+ val histogramTwo = Kamon.histogram("histogram-lookup")
+ histogramOne shouldBe theSameInstanceAs(histogramTwo)
+ }
+
+ "always return the same counter metric" in {
+ val counterOne = Kamon.counter("counter-lookup")
+ val counterTwo = Kamon.counter("counter-lookup")
+ counterOne shouldBe theSameInstanceAs(counterTwo)
+ }
+
+ "always return the same gauge metric" in {
+ val gaugeOne = Kamon.gauge("gauge-lookup")
+ val gaugeTwo = Kamon.gauge("gauge-lookup")
+ gaugeOne shouldBe theSameInstanceAs(gaugeTwo)
+ }
+
+ "always return the same min-max-counter metric" in {
+ val minMaxCounterOne = Kamon.minMaxCounter("min-max-counter-lookup")
+ val minMaxCounterTwo = Kamon.minMaxCounter("min-max-counter-lookup")
+ minMaxCounterOne shouldBe theSameInstanceAs(minMaxCounterTwo)
+ }
+ }
+
+ "refine a metric with tags and" should {
+ "always return the same histogram for a set of tags" in {
+ val histogramOne = Kamon.histogram("histogram-lookup").refine("tag" -> "value")
+ val histogramTwo = Kamon.histogram("histogram-lookup").refine("tag" -> "value")
+ histogramOne shouldBe theSameInstanceAs(histogramTwo)
+ }
+
+ "always return the same counter for a set of tags" in {
+ val counterOne = Kamon.counter("counter-lookup").refine("tag" -> "value")
+ val counterTwo = Kamon.counter("counter-lookup").refine("tag" -> "value")
+ counterOne shouldBe theSameInstanceAs(counterTwo)
+ }
+
+ "always return the same gauge for a set of tags" in {
+ val gaugeOne = Kamon.gauge("gauge-lookup").refine("tag" -> "value")
+ val gaugeTwo = Kamon.gauge("gauge-lookup").refine("tag" -> "value")
+ gaugeOne shouldBe theSameInstanceAs(gaugeTwo)
+ }
+
+ "always return the same min-max-counter for a set of tags" in {
+ val minMaxCounterOne = Kamon.minMaxCounter("min-max-counter-lookup").refine("tag" -> "value")
+ val minMaxCounterTwo = Kamon.minMaxCounter("min-max-counter-lookup").refine("tag" -> "value")
+ minMaxCounterOne shouldBe theSameInstanceAs(minMaxCounterTwo)
+ }
+ }
+ }
+
+}
diff --git a/kamon-core-tests/src/test/scala/kamon/metric/MinMaxCounterSpec.scala b/kamon-core-tests/src/test/scala/kamon/metric/MinMaxCounterSpec.scala
new file mode 100644
index 00000000..0ad3c45c
--- /dev/null
+++ b/kamon-core-tests/src/test/scala/kamon/metric/MinMaxCounterSpec.scala
@@ -0,0 +1,90 @@
+/* =========================================================================================
+ * Copyright © 2013-2017 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+
+package kamon.metric
+
+import java.time.Duration
+
+import org.scalatest.{Matchers, WordSpec}
+
+case class TemporalBucket(value: Long, frequency: Long) extends Bucket
+
+class MinMaxCounterSpec extends WordSpec with Matchers {
+
+ "a MinMaxCounter" should {
+ "track ascending tendencies" in {
+ val mmCounter = buildMinMaxCounter("track-ascending")
+ mmCounter.increment()
+ mmCounter.increment(3)
+ mmCounter.increment()
+
+ mmCounter.sample()
+
+ val snapshot = mmCounter.snapshot()
+
+ snapshot.distribution.min should be(0)
+ snapshot.distribution.max should be(5)
+ }
+
+ "track descending tendencies" in {
+ val mmCounter = buildMinMaxCounter("track-descending")
+ mmCounter.increment(5)
+ mmCounter.decrement()
+ mmCounter.decrement(3)
+ mmCounter.decrement()
+
+ mmCounter.sample()
+
+ val snapshot = mmCounter.snapshot()
+ snapshot.distribution.min should be(0)
+ snapshot.distribution.max should be(5)
+ }
+
+ "reset the min and max to the current value after taking a snapshot" in {
+ val mmCounter = buildMinMaxCounter("reset-min-max-to-current")
+
+ mmCounter.increment(5)
+ mmCounter.decrement(3)
+ mmCounter.sample()
+
+ val firstSnapshot = mmCounter.snapshot()
+ firstSnapshot.distribution.min should be(0)
+ firstSnapshot.distribution.max should be(5)
+
+ mmCounter.sample()
+
+ val secondSnapshot = mmCounter.snapshot()
+ secondSnapshot.distribution.min should be(2)
+ secondSnapshot.distribution.max should be(2)
+ }
+
+ "report zero as the min and current values if the current value fell bellow zero" in {
+ val mmCounter = buildMinMaxCounter("report-zero")
+
+ mmCounter.decrement(3)
+
+ mmCounter.sample()
+
+ val snapshot = mmCounter.snapshot()
+
+ snapshot.distribution.min should be(0)
+ snapshot.distribution.max should be(0)
+ }
+ }
+
+ def buildMinMaxCounter(name: String, tags: Map[String, String] = Map.empty, unit: MeasurementUnit = MeasurementUnit.none): SimpleMinMaxCounter =
+ new SimpleMinMaxCounter(name, tags, new AtomicHdrHistogram(name, tags, unit, dynamicRange = DynamicRange.Default), Duration.ofMillis(100))
+}
\ No newline at end of file
diff --git a/kamon-core-tests/src/test/scala/kamon/metric/RecorderRegistrySpec.scala b/kamon-core-tests/src/test/scala/kamon/metric/RecorderRegistrySpec.scala
new file mode 100644
index 00000000..1053aa5f
--- /dev/null
+++ b/kamon-core-tests/src/test/scala/kamon/metric/RecorderRegistrySpec.scala
@@ -0,0 +1,58 @@
+/* =========================================================================================
+ * Copyright © 2013-2017 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+import com.typesafe.config.ConfigFactory
+import org.scalatest.{Matchers, WordSpec}
+
+//class RecorderRegistrySpec extends WordSpec with Matchers {
+// private val testConfig = ConfigFactory.parseString(
+// """
+// |kamon.metric.filters {
+// | accept-unmatched = false
+// |
+// | my-category {
+// | includes = ["**"]
+// | excludes = ["excluded"]
+// | }
+// |}
+// """.stripMargin
+// )
+// private val recorderRegistry = new RecorderRegistryImpl(testConfig.withFallback(ConfigFactory.load()))
+//
+//
+// "the RecorderRegistry" should {
+// "create entity recorders as requested and always return the same instance for a given entity" in {
+// val myFirstEntityRecorder = recorderRegistry.getRecorder(Entity("my-entity", "my-category", Map.empty))
+// val mySecondEntityRecorder = recorderRegistry.getRecorder(Entity("my-entity", "my-category", Map.empty))
+// mySecondEntityRecorder shouldBe theSameInstanceAs(myFirstEntityRecorder)
+// }
+//
+// "properly advice regarding entity filtering read from configuration" in {
+// recorderRegistry.shouldTrack(Entity("my-entity", "my-category", Map.empty)) shouldBe true
+// recorderRegistry.shouldTrack(Entity("other-eny", "my-category", Map.empty)) shouldBe true
+// recorderRegistry.shouldTrack(Entity("excluded", "my-category", Map.empty)) shouldBe false
+// }
+//
+// "allow removing entities" in {
+// val myFirstEntityRecorder = recorderRegistry.getRecorder(Entity("my-entity", "my-category", Map.empty))
+// recorderRegistry.removeRecorder(Entity("my-entity", "my-category", Map.empty))
+//
+// val mySecondEntityRecorder = recorderRegistry.getRecorder(Entity("my-entity", "my-category", Map.empty))
+// mySecondEntityRecorder shouldNot be theSameInstanceAs(myFirstEntityRecorder)
+// }
+// }
+//}
diff --git a/kamon-core-tests/src/test/scala/kamon/metric/RegexPathFilterSpec.scala b/kamon-core-tests/src/test/scala/kamon/metric/RegexPathFilterSpec.scala
new file mode 100644
index 00000000..f742df1d
--- /dev/null
+++ b/kamon-core-tests/src/test/scala/kamon/metric/RegexPathFilterSpec.scala
@@ -0,0 +1,61 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2017 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon
+package metric
+
+import kamon.util.RegexMatcher
+import org.scalatest.{Matchers, WordSpecLike}
+
+class RegexPathFilterSpec extends WordSpecLike with Matchers {
+ "The RegexPathFilter" should {
+
+ "match a single expression" in {
+ val filter = new RegexMatcher("/user/actor")
+
+ filter.accept("/user/actor") shouldBe true
+
+ filter.accept("/user/actor/something") shouldBe false
+ filter.accept("/user/actor/somethingElse") shouldBe false
+ }
+
+ "match arbitray expressions ending with wildcard" in {
+ val filter = new RegexMatcher("/user/.*")
+
+ filter.accept("/user/actor") shouldBe true
+ filter.accept("/user/otherActor") shouldBe true
+ filter.accept("/user/something/actor") shouldBe true
+ filter.accept("/user/something/otherActor") shouldBe true
+
+ filter.accept("/otheruser/actor") shouldBe false
+ filter.accept("/otheruser/otherActor") shouldBe false
+ filter.accept("/otheruser/something/actor") shouldBe false
+ filter.accept("/otheruser/something/otherActor") shouldBe false
+ }
+
+ "match numbers" in {
+ val filter = new RegexMatcher("/user/actor-\\d")
+
+ filter.accept("/user/actor-1") shouldBe true
+ filter.accept("/user/actor-2") shouldBe true
+ filter.accept("/user/actor-3") shouldBe true
+
+ filter.accept("/user/actor-one") shouldBe false
+ filter.accept("/user/actor-two") shouldBe false
+ filter.accept("/user/actor-tree") shouldBe false
+ }
+ }
+}
diff --git a/kamon-core-tests/src/test/scala/kamon/metric/TimerSpec.scala b/kamon-core-tests/src/test/scala/kamon/metric/TimerSpec.scala
new file mode 100644
index 00000000..3fc1e169
--- /dev/null
+++ b/kamon-core-tests/src/test/scala/kamon/metric/TimerSpec.scala
@@ -0,0 +1,72 @@
+package kamon.metric
+
+import kamon.Kamon
+import org.scalatest.{Matchers, WordSpec}
+
+
+class TimerSpec extends WordSpec with Matchers {
+ import TimerTestHelper._
+
+ "a Timer" should {
+ "record the duration between calls to .start() and .stop() in the StartedTimer" in {
+ val timer = Kamon.timer("timer-spec")
+ timer.start().stop()
+ timer.start().stop()
+ timer.start().stop()
+
+ timer.distribution().count shouldBe(3)
+ }
+
+ "ensure that a started timer can only be stopped once" in {
+ val timer = Kamon.timer("timer-spec")
+ val startedTimer = timer.start()
+ startedTimer.stop()
+ startedTimer.stop()
+ startedTimer.stop()
+
+ timer.distribution().count shouldBe(1)
+ }
+
+
+ "allow to record values and produce distributions as Histograms do" in {
+ val timer = Kamon.timer("test-timer")
+ timer.record(100)
+ timer.record(150, 998)
+ timer.record(200)
+
+ val distribution = timer.distribution()
+ distribution.min shouldBe(100)
+ distribution.max shouldBe(200)
+ distribution.count shouldBe(1000)
+ distribution.buckets.length shouldBe 3
+ distribution.buckets.map(b => (b.value, b.frequency)) should contain.allOf(
+ (100 -> 1),
+ (150 -> 998),
+ (200 -> 1)
+ )
+
+ val emptyDistribution = timer.distribution()
+ emptyDistribution.min shouldBe(0)
+ emptyDistribution.max shouldBe(0)
+ emptyDistribution.count shouldBe(0)
+ emptyDistribution.buckets.length shouldBe 0
+ }
+ }
+}
+
+object TimerTestHelper {
+
+ implicit class HistogramMetricSyntax(histogram: Histogram) {
+ def distribution(resetState: Boolean = true): Distribution = histogram match {
+ case h: AtomicHdrHistogram => h.snapshot(resetState).distribution
+ case h: HdrHistogram => h.snapshot(resetState).distribution
+ }
+ }
+
+ implicit class TimerMetricSyntax(metric: TimerMetric) {
+ def distribution(resetState: Boolean = true): Distribution =
+ metric.refine(Map.empty[String, String]) match {
+ case t: TimerImpl => t.histogram.distribution(resetState)
+ }
+ }
+}
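The TimerSpec above drives the Timer through explicit start()/stop() pairs and plain record(...) calls. A small usage sketch, assuming only the Kamon.timer, start and stop calls exercised by the spec (the metric name and helper are illustrative), times a block of code and guarantees the started timer is stopped exactly once:

import kamon.Kamon

object Timed {
  // Measures how long `block` takes using the (illustrative) "db-query" timer.
  def timed[T](block: => T): T = {
    val started = Kamon.timer("db-query").start()
    try block finally started.stop()
  }
}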
diff --git a/kamon-core-tests/src/test/scala/kamon/metric/instrument/InstrumentFactorySpec.scala b/kamon-core-tests/src/test/scala/kamon/metric/instrument/InstrumentFactorySpec.scala
new file mode 100644
index 00000000..21fe2b4d
--- /dev/null
+++ b/kamon-core-tests/src/test/scala/kamon/metric/instrument/InstrumentFactorySpec.scala
@@ -0,0 +1,114 @@
+package kamon.metric.instrument
+
+//import java.time.Duration
+//
+//import com.typesafe.config.ConfigFactory
+//import kamon.metric.Entity
+//import org.scalatest.{Matchers, WordSpec}
+//
+//class InstrumentFactorySpec extends WordSpec with Matchers{
+// val testEntity = Entity("test", "test-category", Map.empty)
+// val customEntity = Entity("test", "custom-category", Map.empty)
+// val baseConfiguration = ConfigFactory.parseString(
+// """
+// |kamon.metric.instrument-factory {
+// | default-settings {
+// | histogram {
+// | lowest-discernible-value = 100
+// | highest-trackable-value = 5000
+// | significant-value-digits = 2
+// | }
+// |
+// | min-max-counter {
+// | lowest-discernible-value = 200
+// | highest-trackable-value = 6000
+// | significant-value-digits = 3
+// | sample-interval = 647 millis
+// | }
+// | }
+// |
+// | custom-settings {
+// |
+// | }
+// |}
+// """.stripMargin
+// )
+//
+//
+// "the metrics InstrumentFactory" should {
+// "create instruments using the default configuration settings" in {
+// val factory = InstrumentFactory.fromConfig(baseConfiguration)
+// val histogram = factory.buildHistogram(testEntity, "my-histogram")
+// val mmCounter = factory.buildMinMaxCounter(testEntity, "my-mm-counter")
+//
+// histogram.dynamicRange.lowestDiscernibleValue shouldBe(100)
+// histogram.dynamicRange.highestTrackableValue shouldBe(5000)
+// histogram.dynamicRange.significantValueDigits shouldBe(2)
+//
+// mmCounter.dynamicRange.lowestDiscernibleValue shouldBe(200)
+// mmCounter.dynamicRange.highestTrackableValue shouldBe(6000)
+// mmCounter.dynamicRange.significantValueDigits shouldBe(3)
+// mmCounter.sampleInterval shouldBe(Duration.ofMillis(647))
+// }
+//
+// "accept custom settings when building instruments" in {
+// val factory = InstrumentFactory.fromConfig(baseConfiguration)
+// val histogram = factory.buildHistogram(testEntity, "my-histogram", DynamicRange.Loose)
+// val mmCounter = factory.buildMinMaxCounter(testEntity, "my-mm-counter", DynamicRange.Fine, Duration.ofMillis(500))
+//
+// histogram.dynamicRange shouldBe(DynamicRange.Loose)
+//
+// mmCounter.dynamicRange shouldBe(DynamicRange.Fine)
+// mmCounter.sampleInterval shouldBe(Duration.ofMillis(500))
+// }
+//
+// "allow overriding any default and provided settings via the custom-settings configuration key" in {
+// val customConfig = ConfigFactory.parseString(
+// """
+// |kamon.metric.instrument-factory.custom-settings {
+// | custom-category {
+// | modified-histogram {
+// | lowest-discernible-value = 99
+// | highest-trackable-value = 999
+// | significant-value-digits = 4
+// | }
+// |
+// | modified-mm-counter {
+// | lowest-discernible-value = 784
+// | highest-trackable-value = 14785
+// | significant-value-digits = 1
+// | sample-interval = 3 seconds
+// | }
+// | }
+// |}
+// """.stripMargin
+// ).withFallback(baseConfiguration)
+//
+// val factory = InstrumentFactory.fromConfig(customConfig)
+// val defaultHistogram = factory.buildHistogram(customEntity, "default-histogram")
+// val modifiedHistogram = factory.buildHistogram(customEntity, "modified-histogram", DynamicRange.Loose)
+//
+// defaultHistogram.dynamicRange.lowestDiscernibleValue shouldBe(100)
+// defaultHistogram.dynamicRange.highestTrackableValue shouldBe(5000)
+// defaultHistogram.dynamicRange.significantValueDigits shouldBe(2)
+//
+// modifiedHistogram.dynamicRange.lowestDiscernibleValue shouldBe(99)
+// modifiedHistogram.dynamicRange.highestTrackableValue shouldBe(999)
+// modifiedHistogram.dynamicRange.significantValueDigits shouldBe(4)
+//
+//
+// val defaultMMCounter = factory.buildMinMaxCounter(customEntity, "default-mm-counter")
+// val modifiedMMCounter = factory.buildMinMaxCounter(customEntity, "modified-mm-counter", DynamicRange.Loose)
+//
+// defaultMMCounter.dynamicRange.lowestDiscernibleValue shouldBe(200)
+// defaultMMCounter.dynamicRange.highestTrackableValue shouldBe(6000)
+// defaultMMCounter.dynamicRange.significantValueDigits shouldBe(3)
+// defaultMMCounter.sampleInterval shouldBe(Duration.ofMillis(647))
+//
+// modifiedMMCounter.dynamicRange.lowestDiscernibleValue shouldBe(784)
+// modifiedMMCounter.dynamicRange.highestTrackableValue shouldBe(14785)
+// modifiedMMCounter.dynamicRange.significantValueDigits shouldBe(1)
+// modifiedMMCounter.sampleInterval shouldBe(Duration.ofSeconds(3))
+// }
+// }
+//}
diff --git a/kamon-core-tests/src/test/scala/kamon/trace/B3SpanCodecSpec.scala b/kamon-core-tests/src/test/scala/kamon/trace/B3SpanCodecSpec.scala
new file mode 100644
index 00000000..e6fa283e
--- /dev/null
+++ b/kamon-core-tests/src/test/scala/kamon/trace/B3SpanCodecSpec.scala
@@ -0,0 +1,192 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2017 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.trace
+
+import kamon.context.{Context, TextMap}
+import kamon.testkit.SpanBuilding
+import kamon.trace.IdentityProvider.Identifier
+import kamon.trace.SpanContext.SamplingDecision
+import org.scalatest.{Matchers, OptionValues, WordSpecLike}
+
+
+class B3SpanCodecSpec extends WordSpecLike with Matchers with OptionValues with SpanBuilding {
+ val extendedB3Codec = SpanCodec.B3()
+
+ "The ExtendedB3 SpanContextCodec" should {
+ "return a TextMap containing the SpanContext data" in {
+ val context = testContext()
+
+ val textMap = extendedB3Codec.encode(context)
+ textMap.get("X-B3-TraceId").value shouldBe "1234"
+ textMap.get("X-B3-ParentSpanId").value shouldBe "2222"
+ textMap.get("X-B3-SpanId").value shouldBe "4321"
+ textMap.get("X-B3-Sampled").value shouldBe "1"
+ }
+
+
+ "not inject anything if there is no Span in the Context" in {
+ val textMap = extendedB3Codec.encode(Context.Empty)
+ textMap.values shouldBe empty
+ }
+
+ "extract a RemoteSpan from a TextMap when all fields are set" in {
+ val textMap = TextMap.Default()
+ textMap.put("X-B3-TraceId", "1234")
+ textMap.put("X-B3-ParentSpanId", "2222")
+ textMap.put("X-B3-SpanId", "4321")
+ textMap.put("X-B3-Sampled", "1")
+ textMap.put("X-B3-Extra-Baggage", "some=baggage;more=baggage")
+
+ val spanContext = extendedB3Codec.decode(textMap, Context.Empty).get(Span.ContextKey).context()
+ spanContext.traceID.string shouldBe "1234"
+ spanContext.spanID.string shouldBe "4321"
+ spanContext.parentID.string shouldBe "2222"
+ spanContext.samplingDecision shouldBe SamplingDecision.Sample
+ }
+
+ "decode the sampling decision based on the X-B3-Sampled header" in {
+ val sampledTextMap = TextMap.Default()
+ sampledTextMap.put("X-B3-TraceId", "1234")
+ sampledTextMap.put("X-B3-SpanId", "4321")
+ sampledTextMap.put("X-B3-Sampled", "1")
+
+ val notSampledTextMap = TextMap.Default()
+ notSampledTextMap.put("X-B3-TraceId", "1234")
+ notSampledTextMap.put("X-B3-SpanId", "4321")
+ notSampledTextMap.put("X-B3-Sampled", "0")
+
+ val noSamplingTextMap = TextMap.Default()
+ noSamplingTextMap.put("X-B3-TraceId", "1234")
+ noSamplingTextMap.put("X-B3-SpanId", "4321")
+
+ extendedB3Codec.decode(sampledTextMap, Context.Empty)
+ .get(Span.ContextKey).context().samplingDecision shouldBe SamplingDecision.Sample
+
+ extendedB3Codec.decode(notSampledTextMap, Context.Empty)
+ .get(Span.ContextKey).context().samplingDecision shouldBe SamplingDecision.DoNotSample
+
+ extendedB3Codec.decode(noSamplingTextMap, Context.Empty)
+ .get(Span.ContextKey).context().samplingDecision shouldBe SamplingDecision.Unknown
+ }
+
+ "not include the X-B3-Sampled header if the sampling decision is unknown" in {
+ val context = testContext()
+ val sampledSpanContext = context.get(Span.ContextKey).context()
+ val notSampledSpanContext = Context.Empty.withKey(Span.ContextKey,
+ Span.Remote(sampledSpanContext.copy(samplingDecision = SamplingDecision.DoNotSample)))
+ val unknownSamplingSpanContext = Context.Empty.withKey(Span.ContextKey,
+ Span.Remote(sampledSpanContext.copy(samplingDecision = SamplingDecision.Unknown)))
+
+ extendedB3Codec.encode(context).get("X-B3-Sampled").value shouldBe("1")
+ extendedB3Codec.encode(notSampledSpanContext).get("X-B3-Sampled").value shouldBe("0")
+ extendedB3Codec.encode(unknownSamplingSpanContext).get("X-B3-Sampled") shouldBe empty
+ }
+
+ "use the Debug flag to override the sampling decision, if provided." in {
+ val textMap = TextMap.Default()
+ textMap.put("X-B3-TraceId", "1234")
+ textMap.put("X-B3-SpanId", "4321")
+ textMap.put("X-B3-Sampled", "0")
+ textMap.put("X-B3-Flags", "1")
+
+ val spanContext = extendedB3Codec.decode(textMap, Context.Empty).get(Span.ContextKey).context()
+ spanContext.samplingDecision shouldBe SamplingDecision.Sample
+ }
+
+ "use the Debug flag as sampling decision when Sampled is not provided" in {
+ val textMap = TextMap.Default()
+ textMap.put("X-B3-TraceId", "1234")
+ textMap.put("X-B3-SpanId", "4321")
+ textMap.put("X-B3-Flags", "1")
+
+ val spanContext = extendedB3Codec.decode(textMap, Context.Empty).get(Span.ContextKey).context()
+ spanContext.samplingDecision shouldBe SamplingDecision.Sample
+ }
+
+ "extract a minimal SpanContext from a TextMap containing only the Trace ID and Span ID" in {
+ val textMap = TextMap.Default()
+ textMap.put("X-B3-TraceId", "1234")
+ textMap.put("X-B3-SpanId", "4321")
+
+ val spanContext = extendedB3Codec.decode(textMap, Context.Empty).get(Span.ContextKey).context()
+ spanContext.traceID.string shouldBe "1234"
+ spanContext.spanID.string shouldBe "4321"
+ spanContext.parentID shouldBe IdentityProvider.NoIdentifier
+ spanContext.samplingDecision shouldBe SamplingDecision.Unknown
+ }
+
+ "do not extract a SpanContext if Trace ID and Span ID are not provided" in {
+ val onlyTraceID = TextMap.Default()
+ onlyTraceID.put("X-B3-TraceId", "1234")
+ onlyTraceID.put("X-B3-Sampled", "0")
+ onlyTraceID.put("X-B3-Flags", "1")
+
+ val onlySpanID = TextMap.Default()
+ onlySpanID.put("X-B3-SpanId", "4321")
+ onlySpanID.put("X-B3-Sampled", "0")
+ onlySpanID.put("X-B3-Flags", "1")
+
+ val noIds = TextMap.Default()
+ noIds.put("X-B3-Sampled", "0")
+ noIds.put("X-B3-Flags", "1")
+
+ extendedB3Codec.decode(onlyTraceID, Context.Empty).get(Span.ContextKey) shouldBe Span.Empty
+ extendedB3Codec.decode(onlySpanID, Context.Empty).get(Span.ContextKey) shouldBe Span.Empty
+ extendedB3Codec.decode(noIds, Context.Empty).get(Span.ContextKey) shouldBe Span.Empty
+ }
+
+ "round trip a Span from TextMap -> Context -> TextMap" in {
+ val textMap = TextMap.Default()
+ textMap.put("X-B3-TraceId", "1234")
+ textMap.put("X-B3-ParentSpanId", "2222")
+ textMap.put("X-B3-SpanId", "4321")
+ textMap.put("X-B3-Sampled", "1")
+
+ val context = extendedB3Codec.decode(textMap, Context.Empty)
+ val injectTextMap = extendedB3Codec.encode(context)
+
+ textMap.values.toSeq should contain theSameElementsAs(injectTextMap.values.toSeq)
+ }
+
+ /*
+ // TODO: Should we be supporting this use case? maybe even have the concept of Debug requests ourselves?
+ "internally carry the X-B3-Flags value so that it can be injected in outgoing requests" in {
+ val textMap = TextMap.Default()
+ textMap.put("X-B3-TraceId", "1234")
+ textMap.put("X-B3-ParentSpanId", "2222")
+ textMap.put("X-B3-SpanId", "4321")
+ textMap.put("X-B3-Sampled", "1")
+ textMap.put("X-B3-Flags", "1")
+
+ val spanContext = extendedB3Codec.extract(textMap).value
+ val injectTextMap = extendedB3Codec.inject(spanContext)
+
+ injectTextMap.get("X-B3-Flags").value shouldBe("1")
+ }*/
+ }
+
+ def testContext(): Context = {
+ val spanContext = createSpanContext().copy(
+ traceID = Identifier("1234", Array[Byte](1, 2, 3, 4)),
+ spanID = Identifier("4321", Array[Byte](4, 3, 2, 1)),
+ parentID = Identifier("2222", Array[Byte](2, 2, 2, 2))
+ )
+
+ Context.create().withKey(Span.ContextKey, Span.Remote(spanContext))
+ }
+
+}
\ No newline at end of file
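The B3SpanCodecSpec above round-trips the standard B3 headers (X-B3-TraceId, X-B3-SpanId, X-B3-ParentSpanId, X-B3-Sampled and X-B3-Flags) through SpanCodec.B3. As a rough sketch of how a service boundary might use the same calls (only the SpanCodec.B3, TextMap and Context APIs that appear in the tests are assumed): incoming headers are copied into a TextMap and decoded into a Context, and the Context for an outgoing call is encoded back into headers.

import kamon.context.{Context, TextMap}
import kamon.trace.SpanCodec

object B3Propagation {
  private val b3 = SpanCodec.B3()

  // Incoming side: copy the HTTP headers of a request into a TextMap and decode them into a Context.
  def readContext(headers: Map[String, String]): Context = {
    val carrier = TextMap.Default()
    headers.foreach { case (name, value) => carrier.put(name, value) }
    b3.decode(carrier, Context.Empty)
  }

  // Outgoing side: encode the current Context and copy the resulting header values onto the request.
  def writeHeaders(context: Context): TextMap =
    b3.encode(context)
}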
diff --git a/kamon-core-tests/src/test/scala/kamon/trace/DefaultIdentityGeneratorSpec.scala b/kamon-core-tests/src/test/scala/kamon/trace/DefaultIdentityGeneratorSpec.scala
new file mode 100644
index 00000000..8f9af7b0
--- /dev/null
+++ b/kamon-core-tests/src/test/scala/kamon/trace/DefaultIdentityGeneratorSpec.scala
@@ -0,0 +1,52 @@
+package kamon.trace
+
+import kamon.trace.IdentityProvider.Identifier
+import org.scalatest.{Matchers, OptionValues, WordSpecLike}
+import org.scalactic.TimesOnInt._
+
+class DefaultIdentityGeneratorSpec extends WordSpecLike with Matchers with OptionValues {
+ val idProvider = IdentityProvider.Default()
+ val traceGenerator = idProvider.traceIdGenerator()
+ val spanGenerator = idProvider.spanIdGenerator()
+
+ validateGenerator("TraceID Generator", traceGenerator)
+ validateGenerator("SpanID Generator", spanGenerator)
+
+ def validateGenerator(generatorName: String, generator: IdentityProvider.Generator) = {
+ s"The $generatorName" should {
+ "generate random longs (8 byte) identifiers" in {
+ 100 times {
+ val Identifier(string, bytes) = generator.generate()
+
+ string.length should be(16)
+ bytes.length should be(8)
+ }
+ }
+
+ "decode the string representation back into a identifier" in {
+ 100 times {
+ val identifier = generator.generate()
+ val decodedIdentifier = generator.from(identifier.string)
+
+ identifier.string should equal(decodedIdentifier.string)
+ identifier.bytes should equal(decodedIdentifier.bytes)
+ }
+ }
+
+ "decode the bytes representation back into a identifier" in {
+ 100 times {
+ val identifier = generator.generate()
+ val decodedIdentifier = generator.from(identifier.bytes)
+
+ identifier.string should equal(decodedIdentifier.string)
+ identifier.bytes should equal(decodedIdentifier.bytes)
+ }
+ }
+
+ "return IdentityProvider.NoIdentifier if the provided input cannot be decoded into a Identifier" in {
+ generator.from("zzzz") shouldBe(IdentityProvider.NoIdentifier)
+ generator.from(Array[Byte](1)) shouldBe(IdentityProvider.NoIdentifier)
+ }
+ }
+ }
+}
diff --git a/kamon-core-tests/src/test/scala/kamon/trace/DoubleLengthTraceIdentityGeneratorSpec.scala b/kamon-core-tests/src/test/scala/kamon/trace/DoubleLengthTraceIdentityGeneratorSpec.scala
new file mode 100644
index 00000000..b22f17e1
--- /dev/null
+++ b/kamon-core-tests/src/test/scala/kamon/trace/DoubleLengthTraceIdentityGeneratorSpec.scala
@@ -0,0 +1,86 @@
+package kamon.trace
+
+import kamon.trace.IdentityProvider.Identifier
+import org.scalactic.TimesOnInt._
+import org.scalatest.{Matchers, OptionValues, WordSpecLike}
+
+class DoubleLengthTraceIdentityGeneratorSpec extends WordSpecLike with Matchers with OptionValues {
+ val idProvider = IdentityProvider.DoubleSizeTraceID()
+ val traceGenerator = idProvider.traceIdGenerator()
+ val spanGenerator = idProvider.spanIdGenerator()
+
+ "The DoubleSizeTraceID identity provider" when {
+ "generating trace identifiers" should {
+ "generate random longs (16 byte) identifiers" in {
+ 100 times {
+ val Identifier(string, bytes) = traceGenerator.generate()
+
+ string.length should be(32)
+ bytes.length should be(16)
+ }
+ }
+
+ "decode the string representation back into a identifier" in {
+ 100 times {
+ val identifier = traceGenerator.generate()
+ val decodedIdentifier = traceGenerator.from(identifier.string)
+
+ identifier.string should equal(decodedIdentifier.string)
+ identifier.bytes should equal(decodedIdentifier.bytes)
+ }
+ }
+
+ "decode the bytes representation back into a identifier" in {
+ 100 times {
+ val identifier = traceGenerator.generate()
+ val decodedIdentifier = traceGenerator.from(identifier.bytes)
+
+ identifier.string should equal(decodedIdentifier.string)
+ identifier.bytes should equal(decodedIdentifier.bytes)
+ }
+ }
+
+ "return IdentityProvider.NoIdentifier if the provided input cannot be decoded into a Identifier" in {
+ traceGenerator.from("zzzz") shouldBe (IdentityProvider.NoIdentifier)
+ traceGenerator.from(Array[Byte](1)) shouldBe (IdentityProvider.NoIdentifier)
+ }
+ }
+
+ "generating span identifiers" should {
+ "generate random longs (8 byte) identifiers" in {
+ 100 times {
+ val Identifier(string, bytes) = spanGenerator.generate()
+
+ string.length should be(16)
+ bytes.length should be(8)
+ }
+ }
+
+ "decode the string representation back into a identifier" in {
+ 100 times {
+ val identifier = spanGenerator.generate()
+ val decodedIdentifier = spanGenerator.from(identifier.string)
+
+ identifier.string should equal(decodedIdentifier.string)
+ identifier.bytes should equal(decodedIdentifier.bytes)
+ }
+ }
+
+ "decode the bytes representation back into a identifier" in {
+ 100 times {
+ val identifier = spanGenerator.generate()
+ val decodedIdentifier = spanGenerator.from(identifier.bytes)
+
+ identifier.string should equal(decodedIdentifier.string)
+ identifier.bytes should equal(decodedIdentifier.bytes)
+ }
+ }
+
+ "return IdentityProvider.NoIdentifier if the provided input cannot be decoded into a Identifier" in {
+ spanGenerator.from("zzzz") shouldBe (IdentityProvider.NoIdentifier)
+ spanGenerator.from(Array[Byte](1)) shouldBe (IdentityProvider.NoIdentifier)
+ }
+ }
+ }
+
+}
diff --git a/kamon-core-tests/src/test/scala/kamon/trace/LocalSpanSpec.scala b/kamon-core-tests/src/test/scala/kamon/trace/LocalSpanSpec.scala
new file mode 100644
index 00000000..e24f8727
--- /dev/null
+++ b/kamon-core-tests/src/test/scala/kamon/trace/LocalSpanSpec.scala
@@ -0,0 +1,100 @@
+package kamon.trace
+
+import kamon.testkit.{MetricInspection, Reconfigure, TestSpanReporter}
+import kamon.util.Registration
+import kamon.Kamon
+import kamon.trace.Span.{Annotation, TagValue}
+import org.scalatest.concurrent.Eventually
+import org.scalatest.{BeforeAndAfterAll, Matchers, OptionValues, WordSpec}
+import org.scalatest.time.SpanSugar._
+
+class LocalSpanSpec extends WordSpec with Matchers with BeforeAndAfterAll with Eventually with OptionValues
+ with Reconfigure with MetricInspection {
+
+ "a real span" when {
+ "sampled and finished" should {
+ "be sent to the Span reporters" in {
+ Kamon.buildSpan("test-span")
+ .withSpanTag("test", "value")
+ .withStartTimestamp(100)
+ .start()
+ .finish(200)
+
+ eventually(timeout(2 seconds)) {
+ val finishedSpan = reporter.nextSpan().value
+ finishedSpan.operationName shouldBe("test-span")
+ finishedSpan.startTimestampMicros shouldBe 100
+ finishedSpan.endTimestampMicros shouldBe 200
+ finishedSpan.tags should contain("test" -> TagValue.String("value"))
+ }
+ }
+
+ "pass all the tags, annotations and baggage to the FinishedSpan instance when started and finished" in {
+ Kamon.buildSpan("full-span")
+ .withSpanTag("builder-string-tag", "value")
+ .withSpanTag("builder-boolean-tag-true", true)
+ .withSpanTag("builder-boolean-tag-false", false)
+ .withSpanTag("builder-number-tag", 42)
+ .withStartTimestamp(100)
+ .start()
+ .addSpanTag("span-string-tag", "value")
+ .addSpanTag("span-boolean-tag-true", true)
+ .addSpanTag("span-boolean-tag-false", false)
+ .addSpanTag("span-number-tag", 42)
+ .annotate("simple-annotation")
+ .annotate("regular-annotation", Map("data" -> "something"))
+ .annotate(4200, "custom-annotation-1", Map("custom" -> "yes-1"))
+ .annotate(Annotation(4201, "custom-annotation-2", Map("custom" -> "yes-2")))
+ .setOperationName("fully-populated-span")
+ .finish(200)
+
+ eventually(timeout(2 seconds)) {
+ val finishedSpan = reporter.nextSpan().value
+ finishedSpan.operationName shouldBe ("fully-populated-span")
+ finishedSpan.startTimestampMicros shouldBe 100
+ finishedSpan.endTimestampMicros shouldBe 200
+ finishedSpan.tags should contain allOf(
+ "builder-string-tag" -> TagValue.String("value"),
+ "builder-boolean-tag-true" -> TagValue.True,
+ "builder-boolean-tag-false" -> TagValue.False,
+ "builder-number-tag" -> TagValue.Number(42),
+ "span-string-tag" -> TagValue.String("value"),
+ "span-boolean-tag-true" -> TagValue.True,
+ "span-boolean-tag-false" -> TagValue.False,
+ "span-number-tag" -> TagValue.Number(42)
+ )
+
+ finishedSpan.annotations.length shouldBe (4)
+ val annotations = finishedSpan.annotations.groupBy(_.name)
+ annotations.keys should contain allOf(
+ "simple-annotation",
+ "regular-annotation",
+ "custom-annotation-1",
+ "custom-annotation-2"
+ )
+
+ val customAnnotationOne = annotations("custom-annotation-1").head
+ customAnnotationOne.timestampMicros shouldBe (4200)
+ customAnnotationOne.fields shouldBe (Map("custom" -> "yes-1"))
+
+ val customAnnotationTwo = annotations("custom-annotation-2").head
+ customAnnotationTwo.timestampMicros shouldBe (4201)
+ customAnnotationTwo.fields shouldBe (Map("custom" -> "yes-2"))
+ }
+ }
+ }
+ }
+
+ @volatile var registration: Registration = _
+ val reporter = new TestSpanReporter()
+
+ override protected def beforeAll(): Unit = {
+ enableFastSpanFlushing()
+ sampleAlways()
+ registration = Kamon.addReporter(reporter)
+ }
+
+ override protected def afterAll(): Unit = {
+ registration.cancel()
+ }
+}
diff --git a/kamon-core-tests/src/test/scala/kamon/trace/SpanMetrics.scala b/kamon-core-tests/src/test/scala/kamon/trace/SpanMetrics.scala
new file mode 100644
index 00000000..9ecffb24
--- /dev/null
+++ b/kamon-core-tests/src/test/scala/kamon/trace/SpanMetrics.scala
@@ -0,0 +1,64 @@
+package kamon.trace
+
+import kamon.Kamon
+import kamon.Kamon.buildSpan
+import kamon.metric._
+import org.scalatest.{Matchers, WordSpecLike}
+
+class SpanMetrics extends WordSpecLike with Matchers {
+ import SpanMetricsTestHelper._
+
+ val errorTag = "error" -> "true"
+ val histogramMetric: HistogramMetric = Kamon.histogram("span.elapsed-time")
+
+ "Span Metrics" should {
+ "be recorded for successeful execution" in {
+ val operation = "span-success"
+ val operationTag = "operation" -> operation
+
+ buildSpan(operation)
+ .start()
+ .finish()
+
+ val histogram = histogramMetric.refine(operationTag)
+ histogram.distribution().count shouldBe 1
+
+ val errorHistogram = histogramMetric.refine(Map(operationTag, errorTag)).distribution()
+ errorHistogram.count shouldBe 0
+
+ }
+
+ "record correctly error latency and count" in {
+ val operation = "span-failure"
+ val operationTag = "operation" -> operation
+
+ buildSpan(operation)
+ .start()
+ .addSpanTag("error", true)
+ .finish()
+
+ val histogram = histogramMetric.refine(operationTag)
+ histogram.distribution().count shouldBe 0
+
+ val errorHistogram = histogramMetric.refine(operationTag, errorTag).distribution()
+ errorHistogram.count shouldBe 1
+
+ }
+ }
+
+}
+
+object SpanMetricsTestHelper {
+
+ implicit class HistogramMetricSyntax(histogram: Histogram) {
+ def distribution(resetState: Boolean = true): Distribution =
+ histogram match {
+ case hm: HistogramMetric => hm.refine(Map.empty[String, String]).distribution(resetState)
+ case h: AtomicHdrHistogram => h.snapshot(resetState).distribution
+ case h: HdrHistogram => h.snapshot(resetState).distribution
+ }
+ }
+}
+
+
+
diff --git a/kamon-core-tests/src/test/scala/kamon/trace/TracerSpec.scala b/kamon-core-tests/src/test/scala/kamon/trace/TracerSpec.scala
new file mode 100644
index 00000000..fb5bb313
--- /dev/null
+++ b/kamon-core-tests/src/test/scala/kamon/trace/TracerSpec.scala
@@ -0,0 +1,103 @@
+package kamon.trace
+
+import com.typesafe.config.ConfigFactory
+import kamon.Kamon
+import kamon.context.Context
+import kamon.testkit.{SpanBuilding, SpanInspector}
+import kamon.trace.Span.TagValue
+import org.scalatest.{Matchers, OptionValues, WordSpec}
+
+class TracerSpec extends WordSpec with Matchers with SpanBuilding with OptionValues {
+
+ "the Kamon tracer" should {
+ "construct a minimal Span that only has a operation name" in {
+ val span = tracer.buildSpan("myOperation").start()
+ val spanData = inspect(span)
+
+ spanData.operationName() shouldBe "myOperation"
+ spanData.metricTags() shouldBe empty
+ spanData.spanTags() shouldBe empty
+ }
+
+ "pass the operation name and tags to started Span" in {
+ val span = tracer.buildSpan("myOperation")
+ .withMetricTag("metric-tag", "value")
+ .withMetricTag("metric-tag", "value")
+ .withSpanTag("hello", "world")
+ .withSpanTag("kamon", "rulez")
+ .withSpanTag("number", 123)
+ .withSpanTag("boolean", true)
+ .start()
+
+ val spanData = inspect(span)
+ spanData.operationName() shouldBe "myOperation"
+ spanData.metricTags() should contain only (
+ ("metric-tag" -> "value"))
+
+ spanData.spanTags() should contain allOf(
+ ("hello" -> TagValue.String("world")),
+ ("kamon" -> TagValue.String("rulez")),
+ ("number" -> TagValue.Number(123)),
+ ("boolean" -> TagValue.True))
+ }
+
+ "not have any parent Span if there is ActiveSpan and no parent was explicitly given" in {
+ val span = tracer.buildSpan("myOperation").start()
+ val spanData = inspect(span)
+ spanData.context().parentID shouldBe IdentityProvider.NoIdentifier
+ }
+
+
+ "automatically take the Span from the current Context as parent" in {
+ val parent = tracer.buildSpan("myOperation").start()
+ val child = Kamon.withContext(Context.create(Span.ContextKey, parent)) {
+ tracer.buildSpan("childOperation").asChildOf(parent).start()
+ }
+
+ val parentData = inspect(parent)
+ val childData = inspect(child)
+ parentData.context().spanID shouldBe childData.context().parentID
+ }
+
+ "ignore the currently active span as parent if explicitly requested" in {
+ val parent = tracer.buildSpan("myOperation").start()
+ val child = Kamon.withContext(Context.create(Span.ContextKey, parent)) {
+ tracer.buildSpan("childOperation").ignoreActiveSpan().start()
+ }
+
+ val childData = inspect(child)
+ childData.context().parentID shouldBe IdentityProvider.NoIdentifier
+ }
+
+ "allow overriding the start timestamp for a Span" in {
+ val span = tracer.buildSpan("myOperation").withStartTimestamp(100).start()
+ val spanData = inspect(span)
+ spanData.startTimestamp() shouldBe 100
+ }
+
+ "preserve the same Span and Parent identifier when creating a Span with a remote parent if join-remote-parents-with-same-span-id is enabled" in {
+ val previousConfig = Kamon.config()
+
+ Kamon.reconfigure {
+ ConfigFactory.parseString("kamon.trace.join-remote-parents-with-same-span-id = yes")
+ .withFallback(Kamon.config())
+ }
+
+ val remoteParent = Span.Remote(createSpanContext())
+ val childData = inspect(tracer.buildSpan("local").asChildOf(remoteParent).start())
+
+ childData.context().traceID shouldBe remoteParent.context.traceID
+ childData.context().parentID shouldBe remoteParent.context.parentID
+ childData.context().spanID shouldBe remoteParent.context.spanID
+
+ Kamon.reconfigure(previousConfig)
+ }
+
+ }
+
+ val tracer: Tracer = Kamon
+
+ def inspect(span: Span): SpanInspector =
+ SpanInspector(span)
+
+}