author     Ivan Topolnjak <ivantopo@gmail.com>  2017-08-14 23:28:52 +0200
committer  GitHub <noreply@github.com>          2017-08-14 23:28:52 +0200
commit     a949c875684d78818224cd2ca7aaf79aa7878724 (patch)
tree       ce84ab802ba3c543b4b107e32b7cac4dea610fc4
parent     18b9fc25d556fef50c5033f8880fab2594783caa (diff)
parent     3144a04ea42a4e333b7a608597c07c0458c9f147 (diff)
Merge pull request #1 from ivantopo/wip/context-management-reloaded
remove context management from the Tracer
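
For orientation, the sketch below is not part of the commit; it shows how the context API introduced by this merge is meant to be used, based on the new Kamon.currentContext/storeContext/withContext methods and the kamon.context.Context, Key and Storage types added further down. The UserID key and its values are made-up examples.

    import kamon.Kamon
    import kamon.context.{Context, Key}

    object ContextUsageSketch extends App {
      // Broadcast keys are meant to cross process boundaries via the configured
      // codecs; local keys stay in-process. "user-id" is an illustrative name.
      val UserID: Key[String] = Key.broadcast("user-id", "anonymous")

      // Build a Context carrying the key and make it current for a block of code.
      val context = Context.create(UserID, "1234")
      Kamon.withContext(context) {
        // Anywhere down the call stack the current context can be read back.
        println(Kamon.currentContext().get(UserID))
      }

      // Equivalent manual form: store the context, then close the scope to
      // restore whatever context was current before.
      val scope = Kamon.storeContext(context)
      try println(Kamon.currentContext().get(UserID))
      finally scope.close()
    }

The Span itself now travels the same way, under Span.ContextKey, instead of through the removed ActiveSpanStorage.
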
-rw-r--r--  kamon-core/src/main/colfer/context.colf  10
-rw-r--r--  kamon-core/src/main/java/kamon/context/encoding/Context.java  359
-rw-r--r--  kamon-core/src/main/java/kamon/context/encoding/Entry.java  442
-rw-r--r--  kamon-core/src/main/resources/reference.conf  13
-rw-r--r--  kamon-core/src/main/scala/kamon/Kamon.scala  31
-rw-r--r--  kamon-core/src/main/scala/kamon/context/Codec.scala  130
-rw-r--r--  kamon-core/src/main/scala/kamon/context/Context.scala  50
-rw-r--r--  kamon-core/src/main/scala/kamon/context/Mixin.scala (renamed from kamon-core/src/main/scala/kamon/util/Mixin.scala)  21
-rw-r--r--  kamon-core/src/main/scala/kamon/context/Storage.scala  39
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/ActiveSpanStorage.scala  74
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/IdentityProvider.scala  10
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/Span.scala  78
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/SpanCodec.scala  99
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/SpanContext.scala  63
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/SpanContextCodec.scala  174
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/Tracer.scala  105
-rw-r--r--  kamon-core/src/test/scala/kamon/LogInterceptor.scala  30
-rw-r--r--  kamon-core/src/test/scala/kamon/context/ContextCodecSpec.scala  18
-rw-r--r--  kamon-core/src/test/scala/kamon/context/ThreadLocalStorageSpec.scala  41
-rw-r--r--  kamon-core/src/test/scala/kamon/testkit/SpanBuilding.scala  16
-rw-r--r--  kamon-core/src/test/scala/kamon/testkit/SpanInspector.scala  8
-rw-r--r--  kamon-core/src/test/scala/kamon/trace/ActiveSpanManagementSpec.scala  65
-rw-r--r--  kamon-core/src/test/scala/kamon/trace/B3SpanCodecSpec.scala (renamed from kamon-core/src/test/scala/kamon/trace/ExtendedB3SpanContextCodecSpec.scala)  108
-rw-r--r--  kamon-core/src/test/scala/kamon/trace/DefaultIdentityGeneratorSpec.scala  4
-rw-r--r--  kamon-core/src/test/scala/kamon/trace/DoubleLengthTraceIdentityGeneratorSpec.scala  4
-rw-r--r--  kamon-core/src/test/scala/kamon/trace/LocalSpanSpec.scala (renamed from kamon-core/src/test/scala/kamon/trace/RealSpanSpec.scala)  82
-rw-r--r--  kamon-core/src/test/scala/kamon/trace/TracerSpec.scala  83
27 files changed, 1373 insertions, 784 deletions
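
The new kamon.context.Codec wires per-entry codecs from the kamon.context.encoding configuration shown in the reference.conf hunk below, where the span entry maps to kamon.trace.SpanCodec$B3. The following rough sketch, not part of the commit, exercises that codec in isolation; it assumes an initialized Kamon instance (B3.decode resolves identifiers through Kamon.tracer.identityProvider) and the header values are invented.

    import kamon.context.{Context, TextMap}
    import kamon.trace.{Span, SpanCodec}

    object B3CodecSketch extends App {
      // Incoming B3 headers copied into the TextMap carrier used by the codec.
      val incoming = TextMap.Default()
      incoming.put("X-B3-TraceId", "2b17f56d0e89a3c1")
      incoming.put("X-B3-SpanId", "6f2e88d7c1a4b932")
      incoming.put("X-B3-Sampled", "1")

      // Decoding yields a Context holding a remote Span under Span.ContextKey.
      val context = SpanCodec.B3().decode(incoming, Context.Empty)
      val remoteSpan = context.get(Span.ContextKey)
      println(s"${remoteSpan.isRemote()} / ${remoteSpan.context().traceID.string}")

      // Encoding goes the other way: the Span in the Context back into headers.
      val outgoing = SpanCodec.B3().encode(context)
      outgoing.values.foreach { case (name, value) => println(s"$name: $value") }
    }

On the wire this mirrors the ExtendedB3 codec deleted further down, minus the X-B3-Extra-Baggage header, since baggage is replaced by broadcast context keys.
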
diff --git a/kamon-core/src/main/colfer/context.colf b/kamon-core/src/main/colfer/context.colf
new file mode 100644
index 00000000..26421cba
--- /dev/null
+++ b/kamon-core/src/main/colfer/context.colf
@@ -0,0 +1,10 @@
+package kamon
+
+type Entry struct {
+ name text
+ content binary
+}
+
+type Context struct {
+ entries []Entry
+}
\ No newline at end of file
diff --git a/kamon-core/src/main/java/kamon/context/encoding/Context.java b/kamon-core/src/main/java/kamon/context/encoding/Context.java
new file mode 100644
index 00000000..db6ed7a9
--- /dev/null
+++ b/kamon-core/src/main/java/kamon/context/encoding/Context.java
@@ -0,0 +1,359 @@
+package kamon.context.encoding;
+
+
+// Code generated by colf(1); DO NOT EDIT.
+
+
+import static java.lang.String.format;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.io.ObjectStreamException;
+import java.io.OutputStream;
+import java.io.Serializable;
+import java.util.InputMismatchException;
+import java.nio.BufferOverflowException;
+import java.nio.BufferUnderflowException;
+
+
+/**
+ * Data bean with built-in serialization support.
+
+ * @author generated by colf(1)
+ * @see <a href="https://github.com/pascaldekloe/colfer">Colfer's home</a>
+ */
+@javax.annotation.Generated(value="colf(1)", comments="Colfer from schema file context.colf")
+public class Context implements Serializable {
+
+ /** The upper limit for serial byte sizes. */
+ public static int colferSizeMax = 16 * 1024 * 1024;
+
+ /** The upper limit for the number of elements in a list. */
+ public static int colferListMax = 64 * 1024;
+
+
+
+
+ public Entry[] entries;
+
+
+ /** Default constructor */
+ public Context() {
+ init();
+ }
+
+ private static final Entry[] _zeroEntries = new Entry[0];
+
+ /** Colfer zero values. */
+ private void init() {
+ entries = _zeroEntries;
+ }
+
+ /**
+ * {@link #reset(InputStream) Reusable} deserialization of Colfer streams.
+ */
+ public static class Unmarshaller {
+
+ /** The data source. */
+ protected InputStream in;
+
+ /** The read buffer. */
+ public byte[] buf;
+
+ /** The {@link #buf buffer}'s data start index, inclusive. */
+ protected int offset;
+
+ /** The {@link #buf buffer}'s data end index, exclusive. */
+ protected int i;
+
+
+ /**
+ * @param in the data source or {@code null}.
+ * @param buf the initial buffer or {@code null}.
+ */
+ public Unmarshaller(InputStream in, byte[] buf) {
+ // TODO: better size estimation
+ if (buf == null || buf.length == 0)
+ buf = new byte[Math.min(Context.colferSizeMax, 2048)];
+ this.buf = buf;
+ reset(in);
+ }
+
+ /**
+ * Reuses the marshaller.
+ * @param in the data source or {@code null}.
+ * @throws IllegalStateException on pending data.
+ */
+ public void reset(InputStream in) {
+ if (this.i != this.offset) throw new IllegalStateException("colfer: pending data");
+ this.in = in;
+ this.offset = 0;
+ this.i = 0;
+ }
+
+ /**
+ * Deserializes the following object.
+ * @return the result or {@code null} when EOF.
+ * @throws IOException from the input stream.
+ * @throws SecurityException on an upper limit breach defined by either {@link #colferSizeMax} or {@link #colferListMax}.
+ * @throws InputMismatchException when the data does not match this object's schema.
+ */
+ public Context next() throws IOException {
+ if (in == null) return null;
+
+ while (true) {
+ if (this.i > this.offset) {
+ try {
+ Context o = new Context();
+ this.offset = o.unmarshal(this.buf, this.offset, this.i);
+ return o;
+ } catch (BufferUnderflowException e) {
+ }
+ }
+ // not enough data
+
+ if (this.i <= this.offset) {
+ this.offset = 0;
+ this.i = 0;
+ } else if (i == buf.length) {
+ byte[] src = this.buf;
+ // TODO: better size estimation
+ if (offset == 0) this.buf = new byte[Math.min(Context.colferSizeMax, this.buf.length * 4)];
+ System.arraycopy(src, this.offset, this.buf, 0, this.i - this.offset);
+ this.i -= this.offset;
+ this.offset = 0;
+ }
+ assert this.i < this.buf.length;
+
+ int n = in.read(buf, i, buf.length - i);
+ if (n < 0) {
+ if (this.i > this.offset)
+ throw new InputMismatchException("colfer: pending data with EOF");
+ return null;
+ }
+ assert n > 0;
+ i += n;
+ }
+ }
+
+ }
+
+
+ /**
+ * Serializes the object.
+ * All {@code null} elements in {@link #entries} will be replaced with a {@code new} value.
+ * @param out the data destination.
+ * @param buf the initial buffer or {@code null}.
+ * @return the final buffer. When the serial fits into {@code buf} then the return is {@code buf}.
+ * Otherwise the return is a new buffer, large enough to hold the whole serial.
+ * @throws IOException from {@code out}.
+ * @throws IllegalStateException on an upper limit breach defined by either {@link #colferSizeMax} or {@link #colferListMax}.
+ */
+ public byte[] marshal(OutputStream out, byte[] buf) throws IOException {
+ // TODO: better size estimation
+ if (buf == null || buf.length == 0)
+ buf = new byte[Math.min(Context.colferSizeMax, 2048)];
+
+ while (true) {
+ int i;
+ try {
+ i = marshal(buf, 0);
+ } catch (BufferOverflowException e) {
+ buf = new byte[Math.min(Context.colferSizeMax, buf.length * 4)];
+ continue;
+ }
+
+ out.write(buf, 0, i);
+ return buf;
+ }
+ }
+
+ /**
+ * Serializes the object.
+ * All {@code null} elements in {@link #entries} will be replaced with a {@code new} value.
+ * @param buf the data destination.
+ * @param offset the initial index for {@code buf}, inclusive.
+ * @return the final index for {@code buf}, exclusive.
+ * @throws BufferOverflowException when {@code buf} is too small.
+ * @throws IllegalStateException on an upper limit breach defined by either {@link #colferSizeMax} or {@link #colferListMax}.
+ */
+ public int marshal(byte[] buf, int offset) {
+ int i = offset;
+
+ try {
+ if (this.entries.length != 0) {
+ buf[i++] = (byte) 0;
+ Entry[] a = this.entries;
+
+ int x = a.length;
+ if (x > Context.colferListMax)
+ throw new IllegalStateException(format("colfer: kamon/context/kamon.Context.entries length %d exceeds %d elements", x, Context.colferListMax));
+ while (x > 0x7f) {
+ buf[i++] = (byte) (x | 0x80);
+ x >>>= 7;
+ }
+ buf[i++] = (byte) x;
+
+ for (int ai = 0; ai < a.length; ai++) {
+ Entry o = a[ai];
+ if (o == null) {
+ o = new Entry();
+ a[ai] = o;
+ }
+ i = o.marshal(buf, i);
+ }
+ }
+
+ buf[i++] = (byte) 0x7f;
+ return i;
+ } catch (ArrayIndexOutOfBoundsException e) {
+ if (i - offset > Context.colferSizeMax)
+ throw new IllegalStateException(format("colfer: kamon/context/kamon.Context exceeds %d bytes", Context.colferSizeMax));
+ if (i > buf.length) throw new BufferOverflowException();
+ throw e;
+ }
+ }
+
+ /**
+ * Deserializes the object.
+ * @param buf the data source.
+ * @param offset the initial index for {@code buf}, inclusive.
+ * @return the final index for {@code buf}, exclusive.
+ * @throws BufferUnderflowException when {@code buf} is incomplete. (EOF)
+ * @throws SecurityException on an upper limit breach defined by either {@link #colferSizeMax} or {@link #colferListMax}.
+ * @throws InputMismatchException when the data does not match this object's schema.
+ */
+ public int unmarshal(byte[] buf, int offset) {
+ return unmarshal(buf, offset, buf.length);
+ }
+
+ /**
+ * Deserializes the object.
+ * @param buf the data source.
+ * @param offset the initial index for {@code buf}, inclusive.
+ * @param end the index limit for {@code buf}, exclusive.
+ * @return the final index for {@code buf}, exclusive.
+ * @throws BufferUnderflowException when {@code buf} is incomplete. (EOF)
+ * @throws SecurityException on an upper limit breach defined by either {@link #colferSizeMax} or {@link #colferListMax}.
+ * @throws InputMismatchException when the data does not match this object's schema.
+ */
+ public int unmarshal(byte[] buf, int offset, int end) {
+ if (end > buf.length) end = buf.length;
+ int i = offset;
+
+ try {
+ byte header = buf[i++];
+
+ if (header == (byte) 0) {
+ int length = 0;
+ for (int shift = 0; true; shift += 7) {
+ byte b = buf[i++];
+ length |= (b & 0x7f) << shift;
+ if (shift == 28 || b >= 0) break;
+ }
+ if (length < 0 || length > Context.colferListMax)
+ throw new SecurityException(format("colfer: kamon/context/kamon.Context.entries length %d exceeds %d elements", length, Context.colferListMax));
+
+ Entry[] a = new Entry[length];
+ for (int ai = 0; ai < length; ai++) {
+ Entry o = new Entry();
+ i = o.unmarshal(buf, i, end);
+ a[ai] = o;
+ }
+ this.entries = a;
+ header = buf[i++];
+ }
+
+ if (header != (byte) 0x7f)
+ throw new InputMismatchException(format("colfer: unknown header at byte %d", i - 1));
+ } finally {
+ if (i > end && end - offset < Context.colferSizeMax) throw new BufferUnderflowException();
+ if (i < 0 || i - offset > Context.colferSizeMax)
+ throw new SecurityException(format("colfer: kamon/context/kamon.Context exceeds %d bytes", Context.colferSizeMax));
+ if (i > end) throw new BufferUnderflowException();
+ }
+
+ return i;
+ }
+
+ // {@link Serializable} version number.
+ private static final long serialVersionUID = 1L;
+
+ // {@link Serializable} Colfer extension.
+ private void writeObject(ObjectOutputStream out) throws IOException {
+ // TODO: better size estimation
+ byte[] buf = new byte[1024];
+ int n;
+ while (true) try {
+ n = marshal(buf, 0);
+ break;
+ } catch (BufferUnderflowException e) {
+ buf = new byte[4 * buf.length];
+ }
+
+ out.writeInt(n);
+ out.write(buf, 0, n);
+ }
+
+ // {@link Serializable} Colfer extension.
+ private void readObject(ObjectInputStream in) throws ClassNotFoundException, IOException {
+ init();
+
+ int n = in.readInt();
+ byte[] buf = new byte[n];
+ in.readFully(buf);
+ unmarshal(buf, 0);
+ }
+
+ // {@link Serializable} Colfer extension.
+ private void readObjectNoData() throws ObjectStreamException {
+ init();
+ }
+
+ /**
+ * Gets kamon/context/kamon.Context.entries.
+ * @return the value.
+ */
+ public Entry[] getEntries() {
+ return this.entries;
+ }
+
+ /**
+ * Sets kamon/context/kamon.Context.entries.
+ * @param value the replacement.
+ */
+ public void setEntries(Entry[] value) {
+ this.entries = value;
+ }
+
+ /**
+ * Sets kamon/context/kamon.Context.entries.
+ * @param value the replacement.
+ * @return {link this}.
+ */
+ public Context withEntries(Entry[] value) {
+ this.entries = value;
+ return this;
+ }
+
+ @Override
+ public final int hashCode() {
+ int h = 1;
+ for (Entry o : this.entries) h = 31 * h + (o == null ? 0 : o.hashCode());
+ return h;
+ }
+
+ @Override
+ public final boolean equals(Object o) {
+ return o instanceof Context && equals((Context) o);
+ }
+
+ public final boolean equals(Context o) {
+ if (o == null) return false;
+ if (o == this) return true;
+ return o.getClass() == Context.class
+ && java.util.Arrays.equals(this.entries, o.entries);
+ }
+
+}
diff --git a/kamon-core/src/main/java/kamon/context/encoding/Entry.java b/kamon-core/src/main/java/kamon/context/encoding/Entry.java
new file mode 100644
index 00000000..d7734c13
--- /dev/null
+++ b/kamon-core/src/main/java/kamon/context/encoding/Entry.java
@@ -0,0 +1,442 @@
+package kamon.context.encoding;
+
+
+// Code generated by colf(1); DO NOT EDIT.
+
+
+import static java.lang.String.format;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.io.ObjectStreamException;
+import java.io.OutputStream;
+import java.io.Serializable;
+import java.nio.charset.StandardCharsets;
+import java.util.InputMismatchException;
+import java.nio.BufferOverflowException;
+import java.nio.BufferUnderflowException;
+
+
+/**
+ * Data bean with built-in serialization support.
+
+ * @author generated by colf(1)
+ * @see <a href="https://github.com/pascaldekloe/colfer">Colfer's home</a>
+ */
+@javax.annotation.Generated(value="colf(1)", comments="Colfer from schema file context.colf")
+public class Entry implements Serializable {
+
+ /** The upper limit for serial byte sizes. */
+ public static int colferSizeMax = 16 * 1024 * 1024;
+
+
+
+
+ public String name;
+
+ public byte[] content;
+
+
+ /** Default constructor */
+ public Entry() {
+ init();
+ }
+
+ private static final byte[] _zeroBytes = new byte[0];
+
+ /** Colfer zero values. */
+ private void init() {
+ name = "";
+ content = _zeroBytes;
+ }
+
+ /**
+ * {@link #reset(InputStream) Reusable} deserialization of Colfer streams.
+ */
+ public static class Unmarshaller {
+
+ /** The data source. */
+ protected InputStream in;
+
+ /** The read buffer. */
+ public byte[] buf;
+
+ /** The {@link #buf buffer}'s data start index, inclusive. */
+ protected int offset;
+
+ /** The {@link #buf buffer}'s data end index, exclusive. */
+ protected int i;
+
+
+ /**
+ * @param in the data source or {@code null}.
+ * @param buf the initial buffer or {@code null}.
+ */
+ public Unmarshaller(InputStream in, byte[] buf) {
+ // TODO: better size estimation
+ if (buf == null || buf.length == 0)
+ buf = new byte[Math.min(Entry.colferSizeMax, 2048)];
+ this.buf = buf;
+ reset(in);
+ }
+
+ /**
+ * Reuses the marshaller.
+ * @param in the data source or {@code null}.
+ * @throws IllegalStateException on pending data.
+ */
+ public void reset(InputStream in) {
+ if (this.i != this.offset) throw new IllegalStateException("colfer: pending data");
+ this.in = in;
+ this.offset = 0;
+ this.i = 0;
+ }
+
+ /**
+ * Deserializes the following object.
+ * @return the result or {@code null} when EOF.
+ * @throws IOException from the input stream.
+ * @throws SecurityException on an upper limit breach defined by {@link #colferSizeMax}.
+ * @throws InputMismatchException when the data does not match this object's schema.
+ */
+ public Entry next() throws IOException {
+ if (in == null) return null;
+
+ while (true) {
+ if (this.i > this.offset) {
+ try {
+ Entry o = new Entry();
+ this.offset = o.unmarshal(this.buf, this.offset, this.i);
+ return o;
+ } catch (BufferUnderflowException e) {
+ }
+ }
+ // not enough data
+
+ if (this.i <= this.offset) {
+ this.offset = 0;
+ this.i = 0;
+ } else if (i == buf.length) {
+ byte[] src = this.buf;
+ // TODO: better size estimation
+ if (offset == 0) this.buf = new byte[Math.min(Entry.colferSizeMax, this.buf.length * 4)];
+ System.arraycopy(src, this.offset, this.buf, 0, this.i - this.offset);
+ this.i -= this.offset;
+ this.offset = 0;
+ }
+ assert this.i < this.buf.length;
+
+ int n = in.read(buf, i, buf.length - i);
+ if (n < 0) {
+ if (this.i > this.offset)
+ throw new InputMismatchException("colfer: pending data with EOF");
+ return null;
+ }
+ assert n > 0;
+ i += n;
+ }
+ }
+
+ }
+
+
+ /**
+ * Serializes the object.
+ * @param out the data destination.
+ * @param buf the initial buffer or {@code null}.
+ * @return the final buffer. When the serial fits into {@code buf} then the return is {@code buf}.
+ * Otherwise the return is a new buffer, large enough to hold the whole serial.
+ * @throws IOException from {@code out}.
+ * @throws IllegalStateException on an upper limit breach defined by {@link #colferSizeMax}.
+ */
+ public byte[] marshal(OutputStream out, byte[] buf) throws IOException {
+ // TODO: better size estimation
+ if (buf == null || buf.length == 0)
+ buf = new byte[Math.min(Entry.colferSizeMax, 2048)];
+
+ while (true) {
+ int i;
+ try {
+ i = marshal(buf, 0);
+ } catch (BufferOverflowException e) {
+ buf = new byte[Math.min(Entry.colferSizeMax, buf.length * 4)];
+ continue;
+ }
+
+ out.write(buf, 0, i);
+ return buf;
+ }
+ }
+
+ /**
+ * Serializes the object.
+ * @param buf the data destination.
+ * @param offset the initial index for {@code buf}, inclusive.
+ * @return the final index for {@code buf}, exclusive.
+ * @throws BufferOverflowException when {@code buf} is too small.
+ * @throws IllegalStateException on an upper limit breach defined by {@link #colferSizeMax}.
+ */
+ public int marshal(byte[] buf, int offset) {
+ int i = offset;
+
+ try {
+ if (! this.name.isEmpty()) {
+ buf[i++] = (byte) 0;
+ int start = ++i;
+
+ String s = this.name;
+ for (int sIndex = 0, sLength = s.length(); sIndex < sLength; sIndex++) {
+ char c = s.charAt(sIndex);
+ if (c < '\u0080') {
+ buf[i++] = (byte) c;
+ } else if (c < '\u0800') {
+ buf[i++] = (byte) (192 | c >>> 6);
+ buf[i++] = (byte) (128 | c & 63);
+ } else if (c < '\ud800' || c > '\udfff') {
+ buf[i++] = (byte) (224 | c >>> 12);
+ buf[i++] = (byte) (128 | c >>> 6 & 63);
+ buf[i++] = (byte) (128 | c & 63);
+ } else {
+ int cp = 0;
+ if (++sIndex < sLength) cp = Character.toCodePoint(c, s.charAt(sIndex));
+ if ((cp >= 1 << 16) && (cp < 1 << 21)) {
+ buf[i++] = (byte) (240 | cp >>> 18);
+ buf[i++] = (byte) (128 | cp >>> 12 & 63);
+ buf[i++] = (byte) (128 | cp >>> 6 & 63);
+ buf[i++] = (byte) (128 | cp & 63);
+ } else
+ buf[i++] = (byte) '?';
+ }
+ }
+ int size = i - start;
+ if (size > Entry.colferSizeMax)
+ throw new IllegalStateException(format("colfer: kamon/context/kamon.Entry.name size %d exceeds %d UTF-8 bytes", size, Entry.colferSizeMax));
+
+ int ii = start - 1;
+ if (size > 0x7f) {
+ i++;
+ for (int x = size; x >= 1 << 14; x >>>= 7) i++;
+ System.arraycopy(buf, start, buf, i - size, size);
+
+ do {
+ buf[ii++] = (byte) (size | 0x80);
+ size >>>= 7;
+ } while (size > 0x7f);
+ }
+ buf[ii] = (byte) size;
+ }
+
+ if (this.content.length != 0) {
+ buf[i++] = (byte) 1;
+
+ int size = this.content.length;
+ if (size > Entry.colferSizeMax)
+ throw new IllegalStateException(format("colfer: kamon/context/kamon.Entry.content size %d exceeds %d bytes", size, Entry.colferSizeMax));
+
+ int x = size;
+ while (x > 0x7f) {
+ buf[i++] = (byte) (x | 0x80);
+ x >>>= 7;
+ }
+ buf[i++] = (byte) x;
+
+ int start = i;
+ i += size;
+ System.arraycopy(this.content, 0, buf, start, size);
+ }
+
+ buf[i++] = (byte) 0x7f;
+ return i;
+ } catch (ArrayIndexOutOfBoundsException e) {
+ if (i - offset > Entry.colferSizeMax)
+ throw new IllegalStateException(format("colfer: kamon/context/kamon.Entry exceeds %d bytes", Entry.colferSizeMax));
+ if (i > buf.length) throw new BufferOverflowException();
+ throw e;
+ }
+ }
+
+ /**
+ * Deserializes the object.
+ * @param buf the data source.
+ * @param offset the initial index for {@code buf}, inclusive.
+ * @return the final index for {@code buf}, exclusive.
+ * @throws BufferUnderflowException when {@code buf} is incomplete. (EOF)
+ * @throws SecurityException on an upper limit breach defined by {@link #colferSizeMax}.
+ * @throws InputMismatchException when the data does not match this object's schema.
+ */
+ public int unmarshal(byte[] buf, int offset) {
+ return unmarshal(buf, offset, buf.length);
+ }
+
+ /**
+ * Deserializes the object.
+ * @param buf the data source.
+ * @param offset the initial index for {@code buf}, inclusive.
+ * @param end the index limit for {@code buf}, exclusive.
+ * @return the final index for {@code buf}, exclusive.
+ * @throws BufferUnderflowException when {@code buf} is incomplete. (EOF)
+ * @throws SecurityException on an upper limit breach defined by {@link #colferSizeMax}.
+ * @throws InputMismatchException when the data does not match this object's schema.
+ */
+ public int unmarshal(byte[] buf, int offset, int end) {
+ if (end > buf.length) end = buf.length;
+ int i = offset;
+
+ try {
+ byte header = buf[i++];
+
+ if (header == (byte) 0) {
+ int size = 0;
+ for (int shift = 0; true; shift += 7) {
+ byte b = buf[i++];
+ size |= (b & 0x7f) << shift;
+ if (shift == 28 || b >= 0) break;
+ }
+ if (size < 0 || size > Entry.colferSizeMax)
+ throw new SecurityException(format("colfer: kamon/context/kamon.Entry.name size %d exceeds %d UTF-8 bytes", size, Entry.colferSizeMax));
+
+ int start = i;
+ i += size;
+ this.name = new String(buf, start, size, StandardCharsets.UTF_8);
+ header = buf[i++];
+ }
+
+ if (header == (byte) 1) {
+ int size = 0;
+ for (int shift = 0; true; shift += 7) {
+ byte b = buf[i++];
+ size |= (b & 0x7f) << shift;
+ if (shift == 28 || b >= 0) break;
+ }
+ if (size < 0 || size > Entry.colferSizeMax)
+ throw new SecurityException(format("colfer: kamon/context/kamon.Entry.content size %d exceeds %d bytes", size, Entry.colferSizeMax));
+
+ this.content = new byte[size];
+ int start = i;
+ i += size;
+ System.arraycopy(buf, start, this.content, 0, size);
+
+ header = buf[i++];
+ }
+
+ if (header != (byte) 0x7f)
+ throw new InputMismatchException(format("colfer: unknown header at byte %d", i - 1));
+ } finally {
+ if (i > end && end - offset < Entry.colferSizeMax) throw new BufferUnderflowException();
+ if (i < 0 || i - offset > Entry.colferSizeMax)
+ throw new SecurityException(format("colfer: kamon/context/kamon.Entry exceeds %d bytes", Entry.colferSizeMax));
+ if (i > end) throw new BufferUnderflowException();
+ }
+
+ return i;
+ }
+
+ // {@link Serializable} version number.
+ private static final long serialVersionUID = 2L;
+
+ // {@link Serializable} Colfer extension.
+ private void writeObject(ObjectOutputStream out) throws IOException {
+ // TODO: better size estimation
+ byte[] buf = new byte[1024];
+ int n;
+ while (true) try {
+ n = marshal(buf, 0);
+ break;
+ } catch (BufferUnderflowException e) {
+ buf = new byte[4 * buf.length];
+ }
+
+ out.writeInt(n);
+ out.write(buf, 0, n);
+ }
+
+ // {@link Serializable} Colfer extension.
+ private void readObject(ObjectInputStream in) throws ClassNotFoundException, IOException {
+ init();
+
+ int n = in.readInt();
+ byte[] buf = new byte[n];
+ in.readFully(buf);
+ unmarshal(buf, 0);
+ }
+
+ // {@link Serializable} Colfer extension.
+ private void readObjectNoData() throws ObjectStreamException {
+ init();
+ }
+
+ /**
+ * Gets kamon/context/kamon.Entry.name.
+ * @return the value.
+ */
+ public String getName() {
+ return this.name;
+ }
+
+ /**
+ * Sets kamon/context/kamon.Entry.name.
+ * @param value the replacement.
+ */
+ public void setName(String value) {
+ this.name = value;
+ }
+
+ /**
+ * Sets kamon/context/kamon.Entry.name.
+ * @param value the replacement.
+ * @return {link this}.
+ */
+ public Entry withName(String value) {
+ this.name = value;
+ return this;
+ }
+
+ /**
+ * Gets kamon/context/kamon.Entry.content.
+ * @return the value.
+ */
+ public byte[] getContent() {
+ return this.content;
+ }
+
+ /**
+ * Sets kamon/context/kamon.Entry.content.
+ * @param value the replacement.
+ */
+ public void setContent(byte[] value) {
+ this.content = value;
+ }
+
+ /**
+ * Sets kamon/context/kamon.Entry.content.
+ * @param value the replacement.
+ * @return {link this}.
+ */
+ public Entry withContent(byte[] value) {
+ this.content = value;
+ return this;
+ }
+
+ @Override
+ public final int hashCode() {
+ int h = 1;
+ if (this.name != null) h = 31 * h + this.name.hashCode();
+ for (byte b : this.content) h = 31 * h + b;
+ return h;
+ }
+
+ @Override
+ public final boolean equals(Object o) {
+ return o instanceof Entry && equals((Entry) o);
+ }
+
+ public final boolean equals(Entry o) {
+ if (o == null) return false;
+ if (o == this) return true;
+ return o.getClass() == Entry.class
+ && (this.name == null ? o.name == null : this.name.equals(o.name))
+ && java.util.Arrays.equals(this.content, o.content);
+ }
+
+}
diff --git a/kamon-core/src/main/resources/reference.conf b/kamon-core/src/main/resources/reference.conf
index dd42ab03..ad180f1c 100644
--- a/kamon-core/src/main/resources/reference.conf
+++ b/kamon-core/src/main/resources/reference.conf
@@ -140,6 +140,19 @@ kamon {
}
+ context {
+ encoding {
+
+ http-headers {
+ span = "kamon.trace.SpanCodec$B3"
+ }
+
+ binary {
+ # span = "kamon.trace.propagation.Binary"
+ }
+ }
+ }
+
util {
filters {
diff --git a/kamon-core/src/main/scala/kamon/Kamon.scala b/kamon-core/src/main/scala/kamon/Kamon.scala
index 7c3beb84..b1490e32 100644
--- a/kamon-core/src/main/scala/kamon/Kamon.scala
+++ b/kamon-core/src/main/scala/kamon/Kamon.scala
@@ -24,7 +24,7 @@ import scala.concurrent.Future
import java.time.Duration
import java.util.concurrent.{Executors, ScheduledExecutorService, ScheduledThreadPoolExecutor}
-import kamon.trace.SpanContextCodec.Format
+import kamon.context.{Context, Storage}
import org.slf4j.LoggerFactory
import scala.util.Try
@@ -41,6 +41,7 @@ object Kamon extends MetricLookup with ReporterRegistry with Tracer {
private val _metrics = new MetricRegistry(_config, _scheduler)
private val _reporters = new ReporterRegistryImpl(_metrics, _config)
private val _tracer = Tracer.Default(Kamon, _reporters, _config)
+ private val _contextStorage = Storage.ThreadLocal()
private var _onReconfigureHooks = Seq.empty[OnReconfigureHook]
def environment: Environment =
@@ -93,30 +94,20 @@ object Kamon extends MetricLookup with ReporterRegistry with Tracer {
override def buildSpan(operationName: String): Tracer.SpanBuilder =
_tracer.buildSpan(operationName)
- override def extract[C](format: Format[C], carrier: C): Option[SpanContext] =
- _tracer.extract(format, carrier)
- override def inject[C](spanContext: SpanContext, format: Format[C], carrier: C): C =
- _tracer.inject(spanContext, format, carrier)
+ override def identityProvider: IdentityProvider =
+ _tracer.identityProvider
- override def inject[C](spanContext: SpanContext, format: Format[C]): C =
- _tracer.inject(spanContext, format)
+ def currentContext(): Context =
+ _contextStorage.current()
- override def activeSpan(): Span =
- _tracer.activeSpan()
-
- override def activate(span: Span): Scope =
- _tracer.activate(span)
-
-
- /**
- * Makes the provided Span active before code is evaluated and deactivates it afterwards.
- */
- def withActiveSpan[T](span: Span)(code: => T): T = {
- val scope = activate(span)
+ def storeContext(context: Context): Storage.Scope =
+ _contextStorage.store(context)
+ def withContext[T](context: Context)(f: => T): T = {
+ val scope = _contextStorage.store(context)
try {
- code
+ f
} finally {
scope.close()
}
diff --git a/kamon-core/src/main/scala/kamon/context/Codec.scala b/kamon-core/src/main/scala/kamon/context/Codec.scala
new file mode 100644
index 00000000..50b7e93d
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/context/Codec.scala
@@ -0,0 +1,130 @@
+package kamon
+package context
+
+import com.typesafe.config.Config
+import kamon.trace.IdentityProvider
+import kamon.util.DynamicAccess
+import org.slf4j.LoggerFactory
+
+import scala.collection.mutable
+
+class Codec(identityProvider: IdentityProvider, initialConfig: Config) {
+ private val log = LoggerFactory.getLogger(classOf[Codec])
+
+ @volatile private var httpHeaders: Codec.ForContext[TextMap] = new Codec.HttpHeaders(Map.empty)
+ //val Binary: Codec.ForContext[ByteBuffer] = _
+ reconfigure(initialConfig)
+
+
+ def HttpHeaders: Codec.ForContext[TextMap] =
+ httpHeaders
+
+ def reconfigure(config: Config): Unit = {
+ httpHeaders = new Codec.HttpHeaders(readEntryCodecs("kamon.context.encoding.http-headers", config))
+ }
+
+ private def readEntryCodecs[T](rootKey: String, config: Config): Map[String, Codec.ForEntry[T]] = {
+ val rootConfig = config.getConfig(rootKey)
+ val dynamic = new DynamicAccess(getClass.getClassLoader)
+ val entries = Map.newBuilder[String, Codec.ForEntry[T]]
+
+ rootConfig.topLevelKeys.foreach(key => {
+ try {
+ val fqcn = rootConfig.getString(key)
+ entries += ((key, dynamic.createInstanceFor[Codec.ForEntry[T]](fqcn, Nil).get))
+ } catch {
+ case e: Throwable =>
+ log.error(s"Failed to initialize codec for key [$key]", e)
+ }
+ })
+
+ entries.result()
+ }
+}
+
+object Codec {
+
+ trait ForContext[T] {
+ def encode(context: Context): T
+ def decode(carrier: T): Context
+ }
+
+ trait ForEntry[T] {
+ def encode(context: Context): T
+ def decode(carrier: T, context: Context): Context
+ }
+
+ final class HttpHeaders(entryCodecs: Map[String, Codec.ForEntry[TextMap]]) extends Codec.ForContext[TextMap] {
+ private val log = LoggerFactory.getLogger(classOf[HttpHeaders])
+
+ override def encode(context: Context): TextMap = {
+ val encoded = TextMap.Default()
+
+ context.entries.foreach {
+ case (key, _) if key.broadcast =>
+ entryCodecs.get(key.name) match {
+ case Some(codec) =>
+ try {
+ codec.encode(context).values.foreach(pair => encoded.put(pair._1, pair._2))
+ } catch {
+ case e: Throwable => log.error(s"Failed to encode key [${key.name}]", e)
+ }
+
+ case None =>
+ log.error("Context key [{}] should be encoded in HttpHeaders but no codec was found for it.", key.name)
+ }
+ }
+
+ encoded
+ }
+
+ override def decode(carrier: TextMap): Context = {
+ var context: Context = Context.Empty
+
+ try {
+ context = entryCodecs.foldLeft(context)((ctx, codecEntry) => {
+ val (_, codec) = codecEntry
+ codec.decode(carrier, ctx)
+ })
+
+ } catch {
+ case e: Throwable =>
+ log.error("Failed to decode context from HttpHeaders", e)
+ }
+
+ context
+ }
+ }
+}
+
+
+trait TextMap {
+
+ def get(key: String): Option[String]
+
+ def put(key: String, value: String): Unit
+
+ def values: Iterator[(String, String)]
+}
+
+object TextMap {
+
+ class Default extends TextMap {
+ private val storage =
+ mutable.Map.empty[String, String]
+
+ override def get(key: String): Option[String] =
+ storage.get(key)
+
+ override def put(key: String, value: String): Unit =
+ storage.put(key, value)
+
+ override def values: Iterator[(String, String)] =
+ storage.toIterator
+ }
+
+ object Default {
+ def apply(): Default =
+ new Default()
+ }
+}
\ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/context/Context.scala b/kamon-core/src/main/scala/kamon/context/Context.scala
new file mode 100644
index 00000000..f8a4662f
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/context/Context.scala
@@ -0,0 +1,50 @@
+package kamon.context
+
+class Context private (private[context] val entries: Map[Key[_], Any]) {
+ def get[T](key: Key[T]): T =
+ entries.get(key).getOrElse(key.emptyValue).asInstanceOf[T]
+
+ def withKey[T](key: Key[T], value: T): Context =
+ new Context(entries.updated(key, value))
+}
+
+object Context {
+ val Empty = new Context(Map.empty)
+
+ def apply(): Context =
+ Empty
+
+ def create(): Context =
+ Empty
+
+ def apply[T](key: Key[T], value: T): Context =
+ new Context(Map(key -> value))
+
+ def create[T](key: Key[T], value: T): Context =
+ apply(key, value)
+}
+
+
+trait Key[T] {
+ def name: String
+ def emptyValue: T
+ def broadcast: Boolean
+}
+
+object Key {
+
+ def local[T](name: String, emptyValue: T): Key[T] =
+ new Default[T](name, emptyValue, false)
+
+ def broadcast[T](name: String, emptyValue: T): Key[T] =
+ new Default[T](name, emptyValue, true)
+
+
+ private class Default[T](val name: String, val emptyValue: T, val broadcast: Boolean) extends Key[T] {
+ override def hashCode(): Int =
+ name.hashCode
+
+ override def equals(that: Any): Boolean =
+ that.isInstanceOf[Default[_]] && that.asInstanceOf[Default[_]].name == this.name
+ }
+}
\ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/util/Mixin.scala b/kamon-core/src/main/scala/kamon/context/Mixin.scala
index 2fd7be24..64e03748 100644
--- a/kamon-core/src/main/scala/kamon/util/Mixin.scala
+++ b/kamon-core/src/main/scala/kamon/context/Mixin.scala
@@ -13,34 +13,33 @@
* =========================================================================================
*/
-package kamon
-package util
+package kamon.context
-import kamon.trace.Span
+import kamon.Kamon
/**
* Utility trait that marks objects carrying a reference to a Span.
*
*/
-trait HasSpan {
- def span: Span
+trait HasContext {
+ def context: Context
}
-object HasSpan {
- private case class Default(span: Span) extends HasSpan
+object HasContext {
+ private case class Default(context: Context) extends HasContext
/**
* Construct a HasSpan instance that references the provided Span.
*
*/
- def from(span: Span): HasSpan =
- Default(span)
+ def from(context: Context): HasContext =
+ Default(context)
/**
* Construct a HasSpan instance that references the currently ActiveSpan in Kamon's tracer.
*
*/
- def fromActiveSpan(): HasSpan =
- Default(Kamon.activeSpan())
+ def fromCurrentContext(): HasContext =
+ Default(Kamon.currentContext())
}
diff --git a/kamon-core/src/main/scala/kamon/context/Storage.scala b/kamon-core/src/main/scala/kamon/context/Storage.scala
new file mode 100644
index 00000000..6b92ff85
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/context/Storage.scala
@@ -0,0 +1,39 @@
+package kamon.context
+
+trait Storage {
+ def current(): Context
+ def store(context: Context): Storage.Scope
+}
+
+object Storage {
+
+ trait Scope {
+ def context: Context
+ def close(): Unit
+ }
+
+
+ class ThreadLocal extends Storage {
+ private val tls = new java.lang.ThreadLocal[Context]() {
+ override def initialValue(): Context = Context.Empty
+ }
+
+ override def current(): Context =
+ tls.get()
+
+ override def store(context: Context): Scope = {
+ val newContext = context
+ val previousContext = tls.get()
+ tls.set(newContext)
+
+ new Scope {
+ override def context: Context = newContext
+ override def close(): Unit = tls.set(previousContext)
+ }
+ }
+ }
+
+ object ThreadLocal {
+ def apply(): ThreadLocal = new ThreadLocal()
+ }
+}
\ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/trace/ActiveSpanStorage.scala b/kamon-core/src/main/scala/kamon/trace/ActiveSpanStorage.scala
deleted file mode 100644
index 85e94ef2..00000000
--- a/kamon-core/src/main/scala/kamon/trace/ActiveSpanStorage.scala
+++ /dev/null
@@ -1,74 +0,0 @@
-package kamon.trace
-
-/**
- * A means of storing and retrieving the currently active Span. The application code execution is always considered to
- * contribute to the completion of the operation represented by the currently active Span.
- *
- * The activation of a Span is of temporary nature and users of this API must ensure that all Scopes created via calls
- * to `activate(span)` are timely closed; failing to do so might lead to unexpected behavior. Typically, the same block
- * of code designating a Span as currently active will close the created Scope after finishing execution.
- *
- */
-trait ActiveSpanStorage {
-
- /**
- * @return the currently active Span.
- */
- def activeSpan(): Span
-
- /**
- * Sets
- * @param span the Span to be set as currently active.
- * @return a [[Scope]] that will finish the designation of the given Span as active once it's closed.
- */
- def activate(span: Span): Scope
-
-}
-
-/**
- * Encapsulates the state (if any) required to handle the removal of a Span from it's currently active designation.
- *
- * Typically a Scope will enclose the previously active Span and return the previously active Span when closed,
- * although no assumptions are made.
- *
- */
-trait Scope extends AutoCloseable {
-
- /**
- * Removes the currently active Span from the ActiveSpanStorage.
- *
- */
- def close(): Unit
-}
-
-object ActiveSpanStorage {
-
- /**
- * A ActiveSpanStorage that uses a [[java.lang.ThreadLocal]] as the underlying storage.
- *
- */
- final class ThreadLocal extends ActiveSpanStorage {
- private val emptySpan = Span.Empty(this)
- private val storage: java.lang.ThreadLocal[Span] = new java.lang.ThreadLocal[Span] {
- override def initialValue(): Span = emptySpan
- }
-
- override def activeSpan(): Span =
- storage.get()
-
- override def activate(span: Span): Scope = {
- val previouslyActiveSpan = storage.get()
- storage.set(span)
-
- new Scope {
- override def close(): Unit = {
- storage.set(previouslyActiveSpan)
- }
- }
- }
- }
-
- object ThreadLocal {
- def apply(): ThreadLocal = new ThreadLocal()
- }
-}
\ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/trace/IdentityProvider.scala b/kamon-core/src/main/scala/kamon/trace/IdentityProvider.scala
index 3f44629e..937200f5 100644
--- a/kamon-core/src/main/scala/kamon/trace/IdentityProvider.scala
+++ b/kamon-core/src/main/scala/kamon/trace/IdentityProvider.scala
@@ -8,8 +8,8 @@ import kamon.util.HexCodec
import scala.util.Try
trait IdentityProvider {
- def traceIdentifierGenerator(): IdentityProvider.Generator
- def spanIdentifierGenerator(): IdentityProvider.Generator
+ def traceIdGenerator(): IdentityProvider.Generator
+ def spanIdGenerator(): IdentityProvider.Generator
}
object IdentityProvider {
@@ -57,8 +57,8 @@ object IdentityProvider {
} getOrElse(IdentityProvider.NoIdentifier)
}
- override def traceIdentifierGenerator(): Generator = longGenerator
- override def spanIdentifierGenerator(): Generator = longGenerator
+ override def traceIdGenerator(): Generator = longGenerator
+ override def spanIdGenerator(): Generator = longGenerator
}
object Default {
@@ -97,7 +97,7 @@ object IdentityProvider {
} getOrElse(IdentityProvider.NoIdentifier)
}
- override def traceIdentifierGenerator(): Generator = doubleLongGenerator
+ override def traceIdGenerator(): Generator = doubleLongGenerator
}
object DoubleSizeTraceID {
diff --git a/kamon-core/src/main/scala/kamon/trace/Span.scala b/kamon-core/src/main/scala/kamon/trace/Span.scala
index 84cc5625..161042d5 100644
--- a/kamon-core/src/main/scala/kamon/trace/Span.scala
+++ b/kamon-core/src/main/scala/kamon/trace/Span.scala
@@ -17,15 +17,18 @@ package kamon
package trace
import kamon.ReporterRegistry.SpanSink
+import kamon.context.Key
import kamon.trace.SpanContext.SamplingDecision
-
import kamon.util.{Clock, MeasurementUnit}
-/**
- * Minimum set of capabilities that should be provided by a Span, all additional sugar is provided by extensions
- * in the Span trait bellow.
- */
-trait BaseSpan {
+
+trait Span {
+
+ def isEmpty(): Boolean
+ def isLocal(): Boolean
+
+ def nonEmpty(): Boolean = !isEmpty()
+ def isRemote(): Boolean = !isLocal()
def context(): SpanContext
@@ -39,21 +42,11 @@ trait BaseSpan {
def addMetricTag(key: String, value: String): Span
- def addBaggage(key: String, value: String): Span
-
- def getBaggage(key: String): Option[String]
-
def setOperationName(name: String): Span
def disableMetricsCollection(): Span
def finish(finishTimestampMicros: Long): Unit
-}
-
-/**
- *
- */
-trait Span extends BaseSpan {
def finish(): Unit =
finish(Clock.microTimestamp())
@@ -71,25 +64,22 @@ trait Span extends BaseSpan {
object Span {
- final class Empty(activeSpanSource: ActiveSpanStorage) extends Span {
- override val context: SpanContext = SpanContext.EmptySpanContext
+ val ContextKey = Key.broadcast[Span]("span", Span.Empty)
+ object Empty extends Span {
+ override val context: SpanContext = SpanContext.EmptySpanContext
+ override def isEmpty(): Boolean = true
+ override def isLocal(): Boolean = true
override def annotate(annotation: Annotation): Span = this
override def addSpanTag(key: String, value: String): Span = this
override def addSpanTag(key: String, value: Long): Span = this
override def addSpanTag(key: String, value: Boolean): Span = this
override def addMetricTag(key: String, value: String): Span = this
- override def addBaggage(key: String, value: String): Span = this
- override def getBaggage(key: String): Option[String] = None
override def setOperationName(name: String): Span = this
override def disableMetricsCollection(): Span = this
override def finish(finishTimestampMicros: Long): Unit = {}
}
- object Empty {
- def apply(activeSpanSource: ActiveSpanStorage): Empty = new Empty(activeSpanSource)
- }
-
/**
*
* @param spanContext
@@ -98,8 +88,8 @@ object Span {
* @param startTimestampMicros
* @param spanSink
*/
- final class Real(spanContext: SpanContext, initialOperationName: String, initialSpanTags: Map[String, Span.TagValue],
- initialMetricTags: Map[String, String], startTimestampMicros: Long, spanSink: SpanSink, activeSpanSource: ActiveSpanStorage) extends Span {
+ final class Local(spanContext: SpanContext, initialOperationName: String, initialSpanTags: Map[String, Span.TagValue],
+ initialMetricTags: Map[String, String], startTimestampMicros: Long, spanSink: SpanSink) extends Span {
private var collectMetrics: Boolean = true
private var open: Boolean = true
@@ -110,6 +100,9 @@ object Span {
private var customMetricTags = initialMetricTags
private var annotations = List.empty[Span.Annotation]
+ override def isEmpty(): Boolean = false
+ override def isLocal(): Boolean = true
+
def annotate(annotation: Annotation): Span = synchronized {
if(sampled && open)
annotations = annotation :: annotations
@@ -142,14 +135,6 @@ object Span {
this
}
- override def addBaggage(key: String, value: String): Span = {
- spanContext.baggage.add(key, value)
- this
- }
-
- override def getBaggage(key: String): Option[String] =
- spanContext.baggage.get(key)
-
override def disableMetricsCollection(): Span = synchronized {
collectMetrics = false
this
@@ -194,10 +179,29 @@ object Span {
}
}
- object Real {
+ object Local {
def apply(spanContext: SpanContext, initialOperationName: String, initialSpanTags: Map[String, Span.TagValue],
- initialMetricTags: Map[String, String], startTimestampMicros: Long, reporterRegistry: ReporterRegistryImpl, tracer: Tracer): Real =
- new Real(spanContext, initialOperationName, initialSpanTags, initialMetricTags, startTimestampMicros, reporterRegistry, tracer)
+ initialMetricTags: Map[String, String], startTimestampMicros: Long, reporterRegistry: ReporterRegistryImpl): Local =
+ new Local(spanContext, initialOperationName, initialSpanTags, initialMetricTags, startTimestampMicros, reporterRegistry)
+ }
+
+
+ final class Remote(val context: SpanContext) extends Span {
+ override def isEmpty(): Boolean = false
+ override def isLocal(): Boolean = false
+ override def annotate(annotation: Annotation): Span = this
+ override def addSpanTag(key: String, value: String): Span = this
+ override def addSpanTag(key: String, value: Long): Span = this
+ override def addSpanTag(key: String, value: Boolean): Span = this
+ override def addMetricTag(key: String, value: String): Span = this
+ override def setOperationName(name: String): Span = this
+ override def disableMetricsCollection(): Span = this
+ override def finish(finishTimestampMicros: Long): Unit = {}
+ }
+
+ object Remote {
+ def apply(spanContext: SpanContext): Remote =
+ new Remote(spanContext)
}
sealed trait TagValue
diff --git a/kamon-core/src/main/scala/kamon/trace/SpanCodec.scala b/kamon-core/src/main/scala/kamon/trace/SpanCodec.scala
new file mode 100644
index 00000000..e04ceb03
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/trace/SpanCodec.scala
@@ -0,0 +1,99 @@
+/* =========================================================================================
+ * Copyright © 2013-2017 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.trace
+
+import java.net.{URLDecoder, URLEncoder}
+
+import kamon.Kamon
+import kamon.context.{Codec, Context, TextMap}
+import kamon.trace.SpanContext.SamplingDecision
+
+
+object SpanCodec {
+
+ class B3 extends Codec.ForEntry[TextMap] {
+ import B3.Headers
+
+ override def encode(context: Context): TextMap = {
+ val span = context.get(Span.ContextKey)
+ val carrier = TextMap.Default()
+
+ if(span.nonEmpty()) {
+ val spanContext = span.context
+ carrier.put(Headers.TraceIdentifier, urlEncode(spanContext.traceID.string))
+ carrier.put(Headers.SpanIdentifier, urlEncode(spanContext.spanID.string))
+ carrier.put(Headers.ParentSpanIdentifier, urlEncode(spanContext.parentID.string))
+
+ encodeSamplingDecision(spanContext.samplingDecision).foreach { samplingDecision =>
+ carrier.put(Headers.Sampled, samplingDecision)
+ }
+ }
+
+ carrier
+ }
+
+ override def decode(carrier: TextMap, context: Context): Context = {
+ val identityProvider = Kamon.tracer.identityProvider
+ val traceID = carrier.get(Headers.TraceIdentifier)
+ .map(id => identityProvider.traceIdGenerator().from(urlDecode(id)))
+ .getOrElse(IdentityProvider.NoIdentifier)
+
+ val spanID = carrier.get(Headers.SpanIdentifier)
+ .map(id => identityProvider.spanIdGenerator().from(urlDecode(id)))
+ .getOrElse(IdentityProvider.NoIdentifier)
+
+ if(traceID != IdentityProvider.NoIdentifier && spanID != IdentityProvider.NoIdentifier) {
+ val parentID = carrier.get(Headers.ParentSpanIdentifier)
+ .map(id => identityProvider.spanIdGenerator().from(urlDecode(id)))
+ .getOrElse(IdentityProvider.NoIdentifier)
+
+ val flags = carrier.get(Headers.Flags)
+
+ val samplingDecision = flags.orElse(carrier.get(Headers.Sampled)) match {
+ case Some(sampled) if sampled == "1" => SamplingDecision.Sample
+ case Some(sampled) if sampled == "0" => SamplingDecision.DoNotSample
+ case _ => SamplingDecision.Unknown
+ }
+
+ context.withKey(Span.ContextKey, Span.Remote(SpanContext(traceID, spanID, parentID, samplingDecision)))
+
+ } else context
+ }
+
+ private def encodeSamplingDecision(samplingDecision: SamplingDecision): Option[String] = samplingDecision match {
+ case SamplingDecision.Sample => Some("1")
+ case SamplingDecision.DoNotSample => Some("0")
+ case SamplingDecision.Unknown => None
+ }
+
+ private def urlEncode(s: String): String = URLEncoder.encode(s, "UTF-8")
+ private def urlDecode(s: String): String = URLDecoder.decode(s, "UTF-8")
+ }
+
+ object B3 {
+
+ def apply(): B3 =
+ new B3()
+
+ object Headers {
+ val TraceIdentifier = "X-B3-TraceId"
+ val ParentSpanIdentifier = "X-B3-ParentSpanId"
+ val SpanIdentifier = "X-B3-SpanId"
+ val Sampled = "X-B3-Sampled"
+ val Flags = "X-B3-Flags"
+ }
+ }
+}
\ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/trace/SpanContext.scala b/kamon-core/src/main/scala/kamon/trace/SpanContext.scala
index ae92f46d..4d013881 100644
--- a/kamon-core/src/main/scala/kamon/trace/SpanContext.scala
+++ b/kamon-core/src/main/scala/kamon/trace/SpanContext.scala
@@ -16,7 +16,7 @@
package kamon.trace
import kamon.trace.IdentityProvider.Identifier
-import kamon.trace.SpanContext.{Baggage, SamplingDecision, Source}
+import kamon.trace.SpanContext.SamplingDecision
/**
*
@@ -24,9 +24,8 @@ import kamon.trace.SpanContext.{Baggage, SamplingDecision, Source}
* @param spanID
* @param parentID
* @param samplingDecision
- * @param baggage
*/
-case class SpanContext(traceID: Identifier, spanID: Identifier, parentID: Identifier, samplingDecision: SamplingDecision, baggage: Baggage, source: Source) {
+case class SpanContext(traceID: Identifier, spanID: Identifier, parentID: Identifier, samplingDecision: SamplingDecision) {
def createChild(childSpanID: Identifier, samplingDecision: SamplingDecision): SpanContext =
this.copy(parentID = this.spanID, spanID = childSpanID)
@@ -34,73 +33,33 @@ case class SpanContext(traceID: Identifier, spanID: Identifier, parentID: Identi
object SpanContext {
- sealed trait Source
- object Source {
- case object Local extends Source
- case object Remote extends Source
- }
-
val EmptySpanContext = SpanContext(
- traceID = IdentityProvider.NoIdentifier,
- spanID = IdentityProvider.NoIdentifier,
- parentID = IdentityProvider.NoIdentifier,
- samplingDecision = SamplingDecision.DoNotSample,
- baggage = Baggage.EmptyBaggage,
- source = Source.Local
+ traceID = IdentityProvider.NoIdentifier,
+ spanID = IdentityProvider.NoIdentifier,
+ parentID = IdentityProvider.NoIdentifier,
+ samplingDecision = SamplingDecision.DoNotSample
)
sealed trait SamplingDecision
+
object SamplingDecision {
/**
- * The Trace is sampled, all child Spans should be sampled as well.
+ * The Trace is sampled, all child Spans should be sampled as well.
*/
case object Sample extends SamplingDecision
/**
- * The Trace is not sampled, none of the child Spans should be sampled.
+ * The Trace is not sampled, none of the child Spans should be sampled.
*/
case object DoNotSample extends SamplingDecision
/**
- * The sampling decision has not been taken yet, the Tracer is free to decide when creating a Span.
+ * The sampling decision has not been taken yet, the Tracer is free to decide when creating a Span.
*/
case object Unknown extends SamplingDecision
- }
- /**
- *
- */
-
- sealed trait Baggage {
- def add(key: String, value:String): Unit
- def get(key: String): Option[String]
- def getAll(): Map[String, String]
}
- object Baggage {
- def apply(): Baggage = new DefaultBaggage()
-
- case object EmptyBaggage extends Baggage {
- override def add(key: String, value: String): Unit = {}
- override def get(key: String): Option[String] = None
- override def getAll: Map[String, String] = Map.empty
- }
-
-
- final class DefaultBaggage extends Baggage {
- private var baggage: Map[String, String] = Map.empty
-
- def add(key: String, value: String): Unit = synchronized {
- baggage = baggage + (key -> value)
- }
-
- def get(key: String): Option[String] =
- baggage.get(key)
-
- def getAll: Map[String, String] =
- baggage
- }
- }
-}
+}
\ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/trace/SpanContextCodec.scala b/kamon-core/src/main/scala/kamon/trace/SpanContextCodec.scala
deleted file mode 100644
index 43b5e8e4..00000000
--- a/kamon-core/src/main/scala/kamon/trace/SpanContextCodec.scala
+++ /dev/null
@@ -1,174 +0,0 @@
-/* =========================================================================================
- * Copyright © 2013-2017 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.trace
-
-import java.lang.StringBuilder
-import java.net.{URLDecoder, URLEncoder}
-import java.nio.ByteBuffer
-import kamon.trace.SpanContext.{Baggage, SamplingDecision, Source}
-import scala.collection.mutable
-
-trait SpanContextCodec[T] {
- def inject(spanContext: SpanContext, carrier: T): T
- def inject(spanContext: SpanContext): T
- def extract(carrier: T): Option[SpanContext]
-}
-
-object SpanContextCodec {
-
- sealed trait Format[C]
- object Format {
- case object TextMap extends Format[TextMap]
- case object HttpHeaders extends Format[TextMap]
- case object Binary extends Format[ByteBuffer]
- }
-
- class ExtendedB3(identityProvider: IdentityProvider) extends SpanContextCodec[TextMap] {
- import ExtendedB3.Headers
-
- override def inject(spanContext: SpanContext, carrier: TextMap): TextMap = {
- if(spanContext != SpanContext.EmptySpanContext) {
- carrier.put(Headers.TraceIdentifier, urlEncode(spanContext.traceID.string))
- carrier.put(Headers.SpanIdentifier, urlEncode(spanContext.spanID.string))
- carrier.put(Headers.ParentSpanIdentifier, urlEncode(spanContext.parentID.string))
- carrier.put(Headers.Baggage, encodeBaggage(spanContext.baggage))
-
- encodeSamplingDecision(spanContext.samplingDecision).foreach { samplingDecision =>
- carrier.put(Headers.Sampled, samplingDecision)
- }
-
- spanContext.baggage.get(Headers.Flags).foreach { flags =>
- carrier.put(Headers.Flags, flags)
- }
- }
-
- carrier
- }
-
- override def inject(spanContext: SpanContext): TextMap =
- inject(spanContext, TextMap.Default())
-
- override def extract(carrier: TextMap): Option[SpanContext] = {
- val traceID = carrier.get(Headers.TraceIdentifier)
- .map(id => identityProvider.traceIdentifierGenerator().from(urlDecode(id)))
- .getOrElse(IdentityProvider.NoIdentifier)
-
- val spanID = carrier.get(Headers.SpanIdentifier)
- .map(id => identityProvider.spanIdentifierGenerator().from(urlDecode(id)))
- .getOrElse(IdentityProvider.NoIdentifier)
-
- if(traceID != IdentityProvider.NoIdentifier && spanID != IdentityProvider.NoIdentifier) {
- val parentID = carrier.get(Headers.ParentSpanIdentifier)
- .map(id => identityProvider.spanIdentifierGenerator().from(urlDecode(id)))
- .getOrElse(IdentityProvider.NoIdentifier)
-
- val baggage = decodeBaggage(carrier.get(Headers.Baggage))
- val flags = carrier.get(Headers.Flags)
-
- flags.foreach { flags =>
- baggage.add(Headers.Flags, flags)
- }
-
- val samplingDecision = flags.orElse(carrier.get(Headers.Sampled)) match {
- case Some(sampled) if sampled == "1" => SamplingDecision.Sample
- case Some(sampled) if sampled == "0" => SamplingDecision.DoNotSample
- case _ => SamplingDecision.Unknown
- }
-
- Some(SpanContext(traceID, spanID, parentID, samplingDecision, baggage, Source.Remote))
-
- } else None
- }
-
- private def encodeBaggage(baggage: Baggage): String = {
- if(baggage.getAll().nonEmpty) {
- val encodedBaggage = new StringBuilder()
- baggage.getAll().foreach {
- case (key, value) =>
- if(key != Headers.Flags) {
- if (encodedBaggage.length() > 0)
- encodedBaggage.append(';')
-
- encodedBaggage
- .append(urlEncode(key))
- .append('=')
- .append(urlEncode(value))
- }
- }
-
- encodedBaggage.toString()
- } else ""
- }
-
- private def decodeBaggage(encodedBaggage: Option[String]): Baggage = {
- val baggage = Baggage()
- encodedBaggage.foreach { baggageString =>
- baggageString.split(";").foreach { group =>
- val pair = group.split("=")
- if(pair.length >= 2 && pair(0).nonEmpty) {
- baggage.add(urlDecode(pair(0)), urlDecode(pair(1)))
- }
- }
- }
-
- baggage
- }
-
- private def encodeSamplingDecision(samplingDecision: SamplingDecision): Option[String] = samplingDecision match {
- case SamplingDecision.Sample => Some("1")
- case SamplingDecision.DoNotSample => Some("0")
- case SamplingDecision.Unknown => None
- }
-
- private def urlEncode(s: String): String = URLEncoder.encode(s, "UTF-8")
- private def urlDecode(s: String): String = URLDecoder.decode(s, "UTF-8")
-
- }
-
- object ExtendedB3 {
-
- def apply(identityProvider: IdentityProvider): ExtendedB3 =
- new ExtendedB3(identityProvider)
-
- object Headers {
- val TraceIdentifier = "X-B3-TraceId"
- val ParentSpanIdentifier = "X-B3-ParentSpanId"
- val SpanIdentifier = "X-B3-SpanId"
- val Sampled = "X-B3-Sampled"
- val Flags = "X-B3-Flags"
- val Baggage = "X-B3-Extra-Baggage"
- }
- }
-}
-
-trait TextMap {
- def get(key: String): Option[String]
- def put(key: String, value: String): Unit
- def values: Iterator[(String, String)]
-}
-
-object TextMap {
- class Default extends TextMap {
- private val storage = mutable.Map.empty[String, String]
- override def get(key: String): Option[String] = storage.get(key)
- override def put(key: String, value: String): Unit = storage.put(key, value)
- override def values: Iterator[(String, String)] = storage.toIterator
- }
-
- object Default {
- def apply(): Default = new Default()
- }
-}
diff --git a/kamon-core/src/main/scala/kamon/trace/Tracer.scala b/kamon-core/src/main/scala/kamon/trace/Tracer.scala
index bfdd561d..7d8830ca 100644
--- a/kamon-core/src/main/scala/kamon/trace/Tracer.scala
+++ b/kamon-core/src/main/scala/kamon/trace/Tracer.scala
@@ -15,13 +15,11 @@
package kamon.trace
-import java.nio.ByteBuffer
-
import com.typesafe.config.Config
-import kamon.ReporterRegistryImpl
+import kamon.{Kamon, ReporterRegistryImpl}
import kamon.metric.MetricLookup
import kamon.trace.Span.TagValue
-import kamon.trace.SpanContext.{SamplingDecision, Source}
+import kamon.trace.SpanContext.SamplingDecision
import kamon.trace.Tracer.SpanBuilder
import kamon.util.{Clock, DynamicAccess}
import org.slf4j.LoggerFactory
@@ -29,55 +27,28 @@ import org.slf4j.LoggerFactory
import scala.collection.immutable
import scala.util.Try
-trait Tracer extends ActiveSpanStorage {
+trait Tracer {
def buildSpan(operationName: String): SpanBuilder
-
- def extract[C](format: SpanContextCodec.Format[C], carrier: C): Option[SpanContext]
- def inject[C](spanContext: SpanContext, format: SpanContextCodec.Format[C], carrier: C): C
- def inject[C](spanContext: SpanContext, format: SpanContextCodec.Format[C]): C
+ def identityProvider: IdentityProvider
}
object Tracer {
final class Default(metrics: MetricLookup, reporterRegistry: ReporterRegistryImpl, initialConfig: Config) extends Tracer {
private val logger = LoggerFactory.getLogger(classOf[Tracer])
- private val activeSpanSource = ActiveSpanStorage.ThreadLocal()
private[Tracer] val tracerMetrics = new TracerMetrics(metrics)
@volatile private[Tracer] var joinRemoteParentsWithSameSpanID: Boolean = true
@volatile private[Tracer] var configuredSampler: Sampler = Sampler.Never
- @volatile private[Tracer] var identityProvider: IdentityProvider = IdentityProvider.Default()
- @volatile private[Tracer] var textMapSpanContextCodec: SpanContextCodec[TextMap] = SpanContextCodec.ExtendedB3(identityProvider)
- @volatile private[Tracer] var httpHeaderSpanContextCodec: SpanContextCodec[TextMap] = SpanContextCodec.ExtendedB3(identityProvider)
+ @volatile private[Tracer] var _identityProvider: IdentityProvider = IdentityProvider.Default()
reconfigure(initialConfig)
override def buildSpan(operationName: String): SpanBuilder =
new SpanBuilder(operationName, this, reporterRegistry)
- override def extract[C](format: SpanContextCodec.Format[C], carrier: C): Option[SpanContext] = format match {
- case SpanContextCodec.Format.HttpHeaders => httpHeaderSpanContextCodec.extract(carrier.asInstanceOf[TextMap])
- case SpanContextCodec.Format.TextMap => textMapSpanContextCodec.extract(carrier.asInstanceOf[TextMap])
- case SpanContextCodec.Format.Binary => None
- }
-
- override def inject[C](spanContext: SpanContext, format: SpanContextCodec.Format[C], carrier: C): C = format match {
- case SpanContextCodec.Format.HttpHeaders => httpHeaderSpanContextCodec.inject(spanContext, carrier.asInstanceOf[TextMap])
- case SpanContextCodec.Format.TextMap => textMapSpanContextCodec.inject(spanContext, carrier.asInstanceOf[TextMap])
- case SpanContextCodec.Format.Binary => carrier
- }
-
- override def inject[C](spanContext: SpanContext, format: SpanContextCodec.Format[C]): C = format match {
- case SpanContextCodec.Format.HttpHeaders => httpHeaderSpanContextCodec.inject(spanContext)
- case SpanContextCodec.Format.TextMap => textMapSpanContextCodec.inject(spanContext)
- case SpanContextCodec.Format.Binary => ByteBuffer.allocate(0) // TODO: Implement binary encoding.
- }
-
- override def activeSpan(): Span =
- activeSpanSource.activeSpan()
-
- override def activate(span: Span): Scope =
- activeSpanSource.activate(span)
+ override def identityProvider: IdentityProvider =
+ this._identityProvider
def sampler: Sampler =
configuredSampler
@@ -100,25 +71,9 @@ object Tracer {
traceConfig.getString("identity-provider"), immutable.Seq.empty[(Class[_], AnyRef)]
).get
- val spanContextCodecs = traceConfig.getConfig("span-context-codec")
- val newTextMapSpanContextCodec = dynamic.createInstanceFor[SpanContextCodec[TextMap]](
- spanContextCodecs.getString("text-map"), immutable.Seq((classOf[IdentityProvider], newIdentityProvider))
- ).get
-
- val newHttpHeadersSpanContextCodec = dynamic.createInstanceFor[SpanContextCodec[TextMap]](
- spanContextCodecs.getString("http-headers"), immutable.Seq((classOf[IdentityProvider], newIdentityProvider))
- ).get
-
-// val newBinarySpanContextCodec = dynamic.createInstanceFor[SpanContextCodec[TextMap]](
-// spanContextCodecs.getString("binary"), immutable.Seq((classOf[IdentityProvider], newIdentityProvider))
-// ).get // TODO: Make it happen!
-
-
configuredSampler = newSampler
joinRemoteParentsWithSameSpanID = newJoinRemoteParentsWithSameSpanID
- identityProvider = newIdentityProvider
- textMapSpanContextCodec = newTextMapSpanContextCodec
- httpHeaderSpanContextCodec = newHttpHeadersSpanContextCodec
+ _identityProvider = newIdentityProvider
}.failed.foreach {
ex => logger.error("Unable to reconfigure Kamon Tracer", ex)
@@ -132,25 +87,17 @@ object Tracer {
}
final class SpanBuilder(operationName: String, tracer: Tracer.Default, reporterRegistry: ReporterRegistryImpl) {
- private var parentContext: SpanContext = _
+ private var parentSpan: Span = _
private var startTimestamp = 0L
private var initialSpanTags = Map.empty[String, Span.TagValue]
private var initialMetricTags = Map.empty[String, String]
private var useActiveSpanAsParent = true
- def asChildOf(parentContext: SpanContext): SpanBuilder = {
- this.parentContext = parentContext
+ def asChildOf(parent: Span): SpanBuilder = {
+ if(parent != Span.Empty) this.parentSpan = parent
this
}
- def asChildOf(parentContext: Option[SpanContext]): SpanBuilder = {
- parentContext.foreach(asChildOf)
- this
- }
-
- def asChildOf(parentSpan: Span): SpanBuilder =
- asChildOf(parentSpan.context())
-
def withMetricTag(key: String, value: String): SpanBuilder = {
this.initialMetricTags = this.initialMetricTags + (key -> value)
this
@@ -185,38 +132,36 @@ object Tracer {
def start(): Span = {
val startTimestampMicros = if(startTimestamp != 0L) startTimestamp else Clock.microTimestamp()
- val parentSpanContext: Option[SpanContext] = Option(parentContext)
- .orElse(if(useActiveSpanAsParent) Some(tracer.activeSpan().context()) else None)
- .filter(spanContext => spanContext != SpanContext.EmptySpanContext)
+ val parentSpan: Option[Span] = Option(this.parentSpan)
+ .orElse(if(useActiveSpanAsParent) Some(Kamon.currentContext().get(Span.ContextKey)) else None)
+ .filter(span => span != Span.Empty)
- val samplingDecision: SamplingDecision = parentSpanContext
- .map(_.samplingDecision)
+ val samplingDecision: SamplingDecision = parentSpan
+ .map(_.context.samplingDecision)
.filter(_ != SamplingDecision.Unknown)
.getOrElse(tracer.sampler.decide(operationName, initialSpanTags))
- val spanContext = parentSpanContext match {
+ val spanContext = parentSpan match {
case Some(parent) => joinParentContext(parent, samplingDecision)
case None => newSpanContext(samplingDecision)
}
tracer.tracerMetrics.createdSpans.increment()
- Span.Real(spanContext, operationName, initialSpanTags, initialMetricTags, startTimestampMicros, reporterRegistry, tracer)
+ Span.Local(spanContext, operationName, initialSpanTags, initialMetricTags, startTimestampMicros, reporterRegistry)
}
- private def joinParentContext(parent: SpanContext, samplingDecision: SamplingDecision): SpanContext =
- if(parent.source == Source.Remote && tracer.joinRemoteParentsWithSameSpanID)
- parent.copy(samplingDecision = samplingDecision)
+ private def joinParentContext(parent: Span, samplingDecision: SamplingDecision): SpanContext =
+ if(parent.isRemote() && tracer.joinRemoteParentsWithSameSpanID)
+ parent.context().copy(samplingDecision = samplingDecision)
else
- parent.createChild(tracer.identityProvider.spanIdentifierGenerator().generate(), samplingDecision)
+ parent.context().createChild(tracer._identityProvider.spanIdGenerator().generate(), samplingDecision)
private def newSpanContext(samplingDecision: SamplingDecision): SpanContext =
SpanContext(
- traceID = tracer.identityProvider.traceIdentifierGenerator().generate(),
- spanID = tracer.identityProvider.spanIdentifierGenerator().generate(),
+ traceID = tracer._identityProvider.traceIdGenerator().generate(),
+ spanID = tracer._identityProvider.spanIdGenerator().generate(),
parentID = IdentityProvider.NoIdentifier,
- samplingDecision = samplingDecision,
- baggage = SpanContext.Baggage(),
- source = Source.Local
+ samplingDecision = samplingDecision
)
}
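With context management gone from the Tracer, parent resolution now goes through the Context: SpanBuilder accepts a parent Span directly, or falls back to whatever Span sits under Span.ContextKey in Kamon.currentContext(). A sketch of the resulting call pattern, using only calls that appear in the TracerSpec changes below; the operation names are illustrative:

  import kamon.Kamon
  import kamon.context.Context
  import kamon.trace.Span

  val parent = Kamon.buildSpan("incoming-request").start()

  // Any Span stored under Span.ContextKey in the current Context is picked
  // up automatically as the parent of newly started Spans.
  val child = Kamon.withContext(Context.create(Span.ContextKey, parent)) {
    Kamon.buildSpan("db-query").start()
  }

  // Alternatively, pass the parent explicitly and ignore the current Context:
  val sibling = Kamon.buildSpan("cache-lookup")
    .asChildOf(parent)
    .ignoreActiveSpan()
    .start()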
diff --git a/kamon-core/src/test/scala/kamon/LogInterceptor.scala b/kamon-core/src/test/scala/kamon/LogInterceptor.scala
deleted file mode 100644
index 76480a2f..00000000
--- a/kamon-core/src/test/scala/kamon/LogInterceptor.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-/* =========================================================================================
- * Copyright © 2013-2017 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-
-package kamon
-
-//import uk.org.lidalia.slf4jext.Level
-//import uk.org.lidalia.slf4jtest.{LoggingEvent, TestLogger}
-//
-//trait LogInterceptor {
-//
-// def interceptLog[T](level: Level)(code: => T)(implicit tl: TestLogger): Seq[LoggingEvent] = {
-// import scala.collection.JavaConverters._
-// tl.clear()
-// val run = code
-// tl.getLoggingEvents().asScala.filter(_.getLevel == level)
-// }
-//}
diff --git a/kamon-core/src/test/scala/kamon/context/ContextCodecSpec.scala b/kamon-core/src/test/scala/kamon/context/ContextCodecSpec.scala
new file mode 100644
index 00000000..11be85a7
--- /dev/null
+++ b/kamon-core/src/test/scala/kamon/context/ContextCodecSpec.scala
@@ -0,0 +1,18 @@
+package kamon.context
+
+import kamon.Kamon
+import org.scalatest.{Matchers, WordSpec}
+
+class ContextCodecSpec extends WordSpec with Matchers {
+ "the Context Codec" when {
+ "encoding/decoding to HttpHeaders" should {
+ "encode stuff" in {
+
+
+
+ }
+ }
+ }
+
+ val ContextCodec = new Codec(Kamon.identityProvider, Kamon.config())
+}
diff --git a/kamon-core/src/test/scala/kamon/context/ThreadLocalStorageSpec.scala b/kamon-core/src/test/scala/kamon/context/ThreadLocalStorageSpec.scala
new file mode 100644
index 00000000..39f316ba
--- /dev/null
+++ b/kamon-core/src/test/scala/kamon/context/ThreadLocalStorageSpec.scala
@@ -0,0 +1,41 @@
+package kamon.context
+
+
+import org.scalatest.{Matchers, WordSpec}
+
+class ThreadLocalStorageSpec extends WordSpec with Matchers {
+
+ "the Storage.ThreadLocal implementation of Context storage" should {
+ "return a empty context when no context has been set" in {
+ TLS.current() shouldBe Context.Empty
+ }
+
+ "return the empty value for keys that have not been set in the context" in {
+ TLS.current().get(TestKey) shouldBe 42
+ TLS.current().get(AnotherKey) shouldBe 99
+ TLS.current().get(BroadcastKey) shouldBe "i travel around"
+
+ ScopeWithKey.get(TestKey) shouldBe 43
+ ScopeWithKey.get(AnotherKey) shouldBe 99
+ ScopeWithKey.get(BroadcastKey) shouldBe "i travel around"
+ }
+
+ "allow setting a context as current and remove it when closing the Scope" in {
+ TLS.current() shouldBe Context.Empty
+
+ val scope = TLS.store(ScopeWithKey)
+ TLS.current() shouldBe theSameInstanceAs(ScopeWithKey)
+ scope.close()
+
+ TLS.current() shouldBe Context.Empty
+ }
+
+
+ }
+
+ val TLS: Storage = new Storage.ThreadLocal
+ val TestKey = Key.local("test-key", 42)
+ val AnotherKey = Key.local("another-key", 99)
+ val BroadcastKey = Key.broadcast("broadcast", "i travel around")
+ val ScopeWithKey = Context.create().withKey(TestKey, 43)
+}
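The spec above exercises the raw store/close contract of Storage.ThreadLocal. In application code the same contract would typically be wrapped in a try/finally so the previous Context is restored even on failure; a sketch assuming only the Storage, Context and Key calls used in the spec (withStoredContext and the request-id key are illustrative):

  import kamon.context.{Context, Key, Storage}

  val storage: Storage = new Storage.ThreadLocal
  val RequestId = Key.broadcast("request-id", "undefined")

  // Make `context` current while `f` runs, restoring the previous Context
  // afterwards, even if `f` throws.
  def withStoredContext[T](context: Context)(f: => T): T = {
    val scope = storage.store(context)
    try f finally scope.close()
  }

  withStoredContext(Context.create().withKey(RequestId, "req-42")) {
    storage.current().get(RequestId)   // "req-42"
  }
  storage.current()                    // back to Context.Empty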
diff --git a/kamon-core/src/test/scala/kamon/testkit/SpanBuilding.scala b/kamon-core/src/test/scala/kamon/testkit/SpanBuilding.scala
index 9b845ac9..7a216ecc 100644
--- a/kamon-core/src/test/scala/kamon/testkit/SpanBuilding.scala
+++ b/kamon-core/src/test/scala/kamon/testkit/SpanBuilding.scala
@@ -1,20 +1,16 @@
package kamon.testkit
-import kamon.trace.SpanContext.{SamplingDecision, Source}
-import kamon.trace.{IdentityProvider, SpanContext, SpanContextCodec}
+import kamon.trace.SpanContext.SamplingDecision
+import kamon.trace.{IdentityProvider, SpanContext}
trait SpanBuilding {
private val identityProvider = IdentityProvider.Default()
- private val extendedB3Codec = SpanContextCodec.ExtendedB3(identityProvider)
def createSpanContext(samplingDecision: SamplingDecision = SamplingDecision.Sample): SpanContext =
SpanContext(
- traceID = identityProvider.traceIdentifierGenerator().generate(),
- spanID = identityProvider.spanIdentifierGenerator().generate(),
- parentID = identityProvider.spanIdentifierGenerator().generate(),
- samplingDecision = samplingDecision,
- baggage = SpanContext.Baggage(),
- source = Source.Local
+ traceID = identityProvider.traceIdGenerator().generate(),
+ spanID = identityProvider.spanIdGenerator().generate(),
+ parentID = identityProvider.spanIdGenerator().generate(),
+ samplingDecision = samplingDecision
)
-
}
diff --git a/kamon-core/src/test/scala/kamon/testkit/SpanInspector.scala b/kamon-core/src/test/scala/kamon/testkit/SpanInspector.scala
index ab58e446..f23fba98 100644
--- a/kamon-core/src/test/scala/kamon/testkit/SpanInspector.scala
+++ b/kamon-core/src/test/scala/kamon/testkit/SpanInspector.scala
@@ -10,10 +10,10 @@ import scala.util.Try
class SpanInspector(span: Span) {
private val (realSpan, spanData) = Try {
val realSpan = span match {
- case _: Span.Real => span
+ case _: Span.Local => span
}
- val spanData = invoke[Span.Real, FinishedSpan](realSpan, "toFinishedSpan", classOf[Long] -> Long.box(Clock.microTimestamp()))
+ val spanData = invoke[Span.Local, FinishedSpan](realSpan, "toFinishedSpan", classOf[Long] -> Long.box(Clock.microTimestamp()))
(realSpan, spanData)
}.getOrElse((null, null))
@@ -27,10 +27,10 @@ class SpanInspector(span: Span) {
spanData.tags
def metricTags(): Map[String, String] =
- getField[Span.Real, Map[String, String]](realSpan, "customMetricTags")
+ getField[Span.Local, Map[String, String]](realSpan, "customMetricTags")
def startTimestamp(): Long =
- getField[Span.Real, Long](realSpan, "startTimestampMicros")
+ getField[Span.Local, Long](realSpan, "startTimestampMicros")
def context(): SpanContext =
spanData.context
diff --git a/kamon-core/src/test/scala/kamon/trace/ActiveSpanManagementSpec.scala b/kamon-core/src/test/scala/kamon/trace/ActiveSpanManagementSpec.scala
deleted file mode 100644
index a6a7bc3a..00000000
--- a/kamon-core/src/test/scala/kamon/trace/ActiveSpanManagementSpec.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-package kamon.trace
-
-import kamon.Kamon
-import kamon.testkit.SpanInspector
-import kamon.trace.Span.Annotation
-import kamon.util.Clock
-import org.scalatest.{Matchers, WordSpec}
-
-class ActiveSpanManagementSpec extends WordSpec with Matchers {
-
- "Kamon acting as a ActiveSpanSource" should {
- "return a empty span when there is no currently active Span" in {
- inspect(Kamon.activeSpan()) shouldBe empty
- }
-
- "safely operate on a empty Span" in {
- val emptySpan = Kamon.activeSpan()
- val activeSpanData = inspect(Kamon.activeSpan())
- activeSpanData shouldBe empty
-
- emptySpan
- .setOperationName("test")
- .addBaggage("key", "value")
- .addMetricTag("key", "value")
- .addSpanTag("string", "string")
- .addSpanTag("number", 42)
- .addSpanTag("boolean-true", true)
- .addSpanTag("boolean-false", false)
- .annotate(Annotation(Clock.microTimestamp(), "event", Map("k" -> "v")))
-
- val baggage = emptySpan.context().baggage
- baggage.add("key", "value")
- baggage.get("key") shouldBe empty
- baggage.getAll() shouldBe empty
-
- Kamon.withActiveSpan(emptySpan) {
- inspect(Kamon.activeSpan()) shouldBe empty
- }
-
- inspect(Kamon.activeSpan()) shouldBe empty
- }
-
- "set a Span as active when using activate" in {
- val span = Kamon.buildSpan("mySpan").start()
- val scope = Kamon.activate(span)
- Kamon.activeSpan() shouldBe theSameInstanceAs(span)
- scope.close()
- }
-
- "restore the previously active Span when a scope is closed" in {
- val previouslyActiveSpan = Kamon.activeSpan()
- inspect(Kamon.activeSpan()) shouldBe empty
-
- val span = Kamon.buildSpan("mySpan").start()
- Kamon.withActiveSpan(span) {
- Kamon.activeSpan() shouldBe theSameInstanceAs(span)
- }
-
- Kamon.activeSpan() shouldBe theSameInstanceAs(previouslyActiveSpan)
- }
- }
-
- def inspect(span: Span): SpanInspector =
- SpanInspector(span)
-}
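The active-span spec is removed along with Kamon.activate/activeSpan; a Span now travels as a plain value under Span.ContextKey. A short sketch of the empty-Span no-op contract those tests used to cover, assuming Span.Empty keeps behaving as a safe sink for mutations (all calls below appear elsewhere in this diff):

  import kamon.Kamon
  import kamon.trace.Span

  // When no Span has been stored, the current Context yields Span.Empty.
  val emptySpan = Kamon.currentContext().get(Span.ContextKey)

  // Operating on the empty Span is expected to be a no-op rather than an error.
  emptySpan
    .setOperationName("test")
    .addMetricTag("key", "value")
    .addSpanTag("number", 42)
    .annotate("event")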
diff --git a/kamon-core/src/test/scala/kamon/trace/ExtendedB3SpanContextCodecSpec.scala b/kamon-core/src/test/scala/kamon/trace/B3SpanCodecSpec.scala
index 24cb7ef5..e6fa283e 100644
--- a/kamon-core/src/test/scala/kamon/trace/ExtendedB3SpanContextCodecSpec.scala
+++ b/kamon-core/src/test/scala/kamon/trace/B3SpanCodecSpec.scala
@@ -16,50 +16,34 @@
package kamon.trace
+import kamon.context.{Context, TextMap}
import kamon.testkit.SpanBuilding
import kamon.trace.IdentityProvider.Identifier
import kamon.trace.SpanContext.SamplingDecision
import org.scalatest.{Matchers, OptionValues, WordSpecLike}
-class ExtendedB3SpanContextCodecSpec extends WordSpecLike with Matchers with OptionValues with SpanBuilding {
- val identityProvider = IdentityProvider.Default()
- val extendedB3Codec = SpanContextCodec.ExtendedB3(identityProvider)
+class B3SpanCodecSpec extends WordSpecLike with Matchers with OptionValues with SpanBuilding {
+ val extendedB3Codec = SpanCodec.B3()
"The ExtendedB3 SpanContextCodec" should {
"return a TextMap containing the SpanContext data" in {
- val context = testSpanContext()
- context.baggage.add("some", "baggage")
- context.baggage.add("more", "baggage")
+ val context = testContext()
- val textMap = extendedB3Codec.inject(context)
+ val textMap = extendedB3Codec.encode(context)
textMap.get("X-B3-TraceId").value shouldBe "1234"
textMap.get("X-B3-ParentSpanId").value shouldBe "2222"
textMap.get("X-B3-SpanId").value shouldBe "4321"
textMap.get("X-B3-Sampled").value shouldBe "1"
- textMap.get("X-B3-Extra-Baggage").value shouldBe "some=baggage;more=baggage"
}
- "allow to provide the TextMap to be used for encoding" in {
- val context = testSpanContext()
- context.baggage.add("some", "baggage")
- context.baggage.add("more", "baggage")
- val textMap = TextMap.Default()
- extendedB3Codec.inject(context, textMap)
- textMap.get("X-B3-TraceId").value shouldBe "1234"
- textMap.get("X-B3-ParentSpanId").value shouldBe "2222"
- textMap.get("X-B3-SpanId").value shouldBe "4321"
- textMap.get("X-B3-Sampled").value shouldBe "1"
- textMap.get("X-B3-Extra-Baggage").value shouldBe "some=baggage;more=baggage"
- }
-
- "not inject anything if the SpanContext is empty" in {
- val textMap = extendedB3Codec.inject(SpanContext.EmptySpanContext)
+ "not inject anything if there is no Span in the Context" in {
+ val textMap = extendedB3Codec.encode(Context.Empty)
textMap.values shouldBe empty
}
- "extract a SpanContext from a TextMap when all fields are set" in {
+ "extract a RemoteSpan from a TextMap when all fields are set" in {
val textMap = TextMap.Default()
textMap.put("X-B3-TraceId", "1234")
textMap.put("X-B3-ParentSpanId", "2222")
@@ -67,15 +51,11 @@ class ExtendedB3SpanContextCodecSpec extends WordSpecLike with Matchers with Opt
textMap.put("X-B3-Sampled", "1")
textMap.put("X-B3-Extra-Baggage", "some=baggage;more=baggage")
- val spanContext = extendedB3Codec.extract(textMap).value
+ val spanContext = extendedB3Codec.decode(textMap, Context.Empty).get(Span.ContextKey).context()
spanContext.traceID.string shouldBe "1234"
spanContext.spanID.string shouldBe "4321"
spanContext.parentID.string shouldBe "2222"
spanContext.samplingDecision shouldBe SamplingDecision.Sample
- spanContext.baggage.getAll() should contain allOf(
- "some" -> "baggage",
- "more" -> "baggage"
- )
}
"decode the sampling decision based on the X-B3-Sampled header" in {
@@ -93,19 +73,27 @@ class ExtendedB3SpanContextCodecSpec extends WordSpecLike with Matchers with Opt
noSamplingTextMap.put("X-B3-TraceId", "1234")
noSamplingTextMap.put("X-B3-SpanId", "4321")
- extendedB3Codec.extract(sampledTextMap).value.samplingDecision shouldBe SamplingDecision.Sample
- extendedB3Codec.extract(notSampledTextMap).value.samplingDecision shouldBe SamplingDecision.DoNotSample
- extendedB3Codec.extract(noSamplingTextMap).value.samplingDecision shouldBe SamplingDecision.Unknown
+ extendedB3Codec.decode(sampledTextMap, Context.Empty)
+ .get(Span.ContextKey).context().samplingDecision shouldBe SamplingDecision.Sample
+
+ extendedB3Codec.decode(notSampledTextMap, Context.Empty)
+ .get(Span.ContextKey).context().samplingDecision shouldBe SamplingDecision.DoNotSample
+
+ extendedB3Codec.decode(noSamplingTextMap, Context.Empty)
+ .get(Span.ContextKey).context().samplingDecision shouldBe SamplingDecision.Unknown
}
"not include the X-B3-Sampled header if the sampling decision is unknown" in {
- val sampledSpanContext = testSpanContext()
- val notSampledSpanContext = testSpanContext().copy(samplingDecision = SamplingDecision.DoNotSample)
- val unknownSamplingSpanContext = testSpanContext().copy(samplingDecision = SamplingDecision.Unknown)
-
- extendedB3Codec.inject(sampledSpanContext).get("X-B3-Sampled").value shouldBe("1")
- extendedB3Codec.inject(notSampledSpanContext).get("X-B3-Sampled").value shouldBe("0")
- extendedB3Codec.inject(unknownSamplingSpanContext).get("X-B3-Sampled") shouldBe empty
+ val context = testContext()
+ val sampledSpanContext = context.get(Span.ContextKey).context()
+ val notSampledSpanContext = Context.Empty.withKey(Span.ContextKey,
+ Span.Remote(sampledSpanContext.copy(samplingDecision = SamplingDecision.DoNotSample)))
+ val unknownSamplingSpanContext = Context.Empty.withKey(Span.ContextKey,
+ Span.Remote(sampledSpanContext.copy(samplingDecision = SamplingDecision.Unknown)))
+
+ extendedB3Codec.encode(context).get("X-B3-Sampled").value shouldBe("1")
+ extendedB3Codec.encode(notSampledSpanContext).get("X-B3-Sampled").value shouldBe("0")
+ extendedB3Codec.encode(unknownSamplingSpanContext).get("X-B3-Sampled") shouldBe empty
}
"use the Debug flag to override the sampling decision, if provided." in {
@@ -115,7 +103,7 @@ class ExtendedB3SpanContextCodecSpec extends WordSpecLike with Matchers with Opt
textMap.put("X-B3-Sampled", "0")
textMap.put("X-B3-Flags", "1")
- val spanContext = extendedB3Codec.extract(textMap).value
+ val spanContext = extendedB3Codec.decode(textMap, Context.Empty).get(Span.ContextKey).context()
spanContext.samplingDecision shouldBe SamplingDecision.Sample
}
@@ -125,7 +113,7 @@ class ExtendedB3SpanContextCodecSpec extends WordSpecLike with Matchers with Opt
textMap.put("X-B3-SpanId", "4321")
textMap.put("X-B3-Flags", "1")
- val spanContext = extendedB3Codec.extract(textMap).value
+ val spanContext = extendedB3Codec.decode(textMap, Context.Empty).get(Span.ContextKey).context()
spanContext.samplingDecision shouldBe SamplingDecision.Sample
}
@@ -134,12 +122,11 @@ class ExtendedB3SpanContextCodecSpec extends WordSpecLike with Matchers with Opt
textMap.put("X-B3-TraceId", "1234")
textMap.put("X-B3-SpanId", "4321")
- val spanContext = extendedB3Codec.extract(textMap).value
+ val spanContext = extendedB3Codec.decode(textMap, Context.Empty).get(Span.ContextKey).context()
spanContext.traceID.string shouldBe "1234"
spanContext.spanID.string shouldBe "4321"
spanContext.parentID shouldBe IdentityProvider.NoIdentifier
spanContext.samplingDecision shouldBe SamplingDecision.Unknown
- spanContext.baggage.getAll() shouldBe empty
}
"do not extract a SpanContext if Trace ID and Span ID are not provided" in {
@@ -157,34 +144,26 @@ class ExtendedB3SpanContextCodecSpec extends WordSpecLike with Matchers with Opt
noIds.put("X-B3-Sampled", "0")
noIds.put("X-B3-Flags", "1")
- extendedB3Codec.extract(onlyTraceID) shouldBe empty
- extendedB3Codec.extract(onlySpanID) shouldBe empty
- extendedB3Codec.extract(noIds) shouldBe empty
+ extendedB3Codec.decode(onlyTraceID, Context.Empty).get(Span.ContextKey) shouldBe Span.Empty
+ extendedB3Codec.decode(onlySpanID, Context.Empty).get(Span.ContextKey) shouldBe Span.Empty
+ extendedB3Codec.decode(noIds, Context.Empty).get(Span.ContextKey) shouldBe Span.Empty
}
- "round trip a SpanContext from TextMap -> SpanContext -> TextMap" in {
+ "round trip a Span from TextMap -> Context -> TextMap" in {
val textMap = TextMap.Default()
textMap.put("X-B3-TraceId", "1234")
textMap.put("X-B3-ParentSpanId", "2222")
textMap.put("X-B3-SpanId", "4321")
textMap.put("X-B3-Sampled", "1")
- textMap.put("X-B3-Extra-Baggage", "some=baggage;more=baggage")
- val spanContext = extendedB3Codec.extract(textMap).value
- val injectTextMap = extendedB3Codec.inject(spanContext)
+ val context = extendedB3Codec.decode(textMap, Context.Empty)
+ val injectTextMap = extendedB3Codec.encode(context)
textMap.values.toSeq should contain theSameElementsAs(injectTextMap.values.toSeq)
}
- "round trip a baggage that has special characters in there" in {
- val spanContext = testSpanContext()
- spanContext.baggage.add("key-with-!specials", "value=with~spec;als")
-
- val textMap = extendedB3Codec.inject(spanContext)
- val extractedSpanContext = extendedB3Codec.extract(textMap).value
- extractedSpanContext.baggage.getAll().values.toSeq should contain theSameElementsAs(spanContext.baggage.getAll().values.toSeq)
- }
-
+ /*
+ // TODO: Should we be supporting this use case? maybe even have the concept of Debug requests ourselves?
"internally carry the X-B3-Flags value so that it can be injected in outgoing requests" in {
val textMap = TextMap.Default()
textMap.put("X-B3-TraceId", "1234")
@@ -192,19 +171,22 @@ class ExtendedB3SpanContextCodecSpec extends WordSpecLike with Matchers with Opt
textMap.put("X-B3-SpanId", "4321")
textMap.put("X-B3-Sampled", "1")
textMap.put("X-B3-Flags", "1")
- textMap.put("X-B3-Extra-Baggage", "some=baggage;more=baggage")
val spanContext = extendedB3Codec.extract(textMap).value
val injectTextMap = extendedB3Codec.inject(spanContext)
injectTextMap.get("X-B3-Flags").value shouldBe("1")
- }
+ }*/
}
- def testSpanContext(): SpanContext =
- createSpanContext().copy(
+ def testContext(): Context = {
+ val spanContext = createSpanContext().copy(
traceID = Identifier("1234", Array[Byte](1, 2, 3, 4)),
spanID = Identifier("4321", Array[Byte](4, 3, 2, 1)),
parentID = Identifier("2222", Array[Byte](2, 2, 2, 2))
)
+
+ Context.create().withKey(Span.ContextKey, Span.Remote(spanContext))
+ }
+
} \ No newline at end of file
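Condensed round trip of the new codec driven by the spec above: SpanCodec.B3 decodes the X-B3-* headers into a Context carrying a remote Span, and encodes a Context back into a TextMap (baggage is no longer part of this codec). Only calls that appear in the spec are used:

  import kamon.context.{Context, TextMap}
  import kamon.trace.{Span, SpanCodec}

  val codec = SpanCodec.B3()

  // Incoming headers -> Context with a remote Span under Span.ContextKey
  val incoming = TextMap.Default()
  incoming.put("X-B3-TraceId", "1234")
  incoming.put("X-B3-SpanId", "4321")
  incoming.put("X-B3-Sampled", "1")

  val context = codec.decode(incoming, Context.Empty)
  val spanContext = context.get(Span.ContextKey).context()
  spanContext.traceID.string           // "1234"

  // Context -> outgoing headers
  val outgoing = codec.encode(context)
  outgoing.get("X-B3-TraceId")         // Some("1234")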
diff --git a/kamon-core/src/test/scala/kamon/trace/DefaultIdentityGeneratorSpec.scala b/kamon-core/src/test/scala/kamon/trace/DefaultIdentityGeneratorSpec.scala
index 8977e3cd..8f9af7b0 100644
--- a/kamon-core/src/test/scala/kamon/trace/DefaultIdentityGeneratorSpec.scala
+++ b/kamon-core/src/test/scala/kamon/trace/DefaultIdentityGeneratorSpec.scala
@@ -6,8 +6,8 @@ import org.scalactic.TimesOnInt._
class DefaultIdentityGeneratorSpec extends WordSpecLike with Matchers with OptionValues {
val idProvider = IdentityProvider.Default()
- val traceGenerator = idProvider.traceIdentifierGenerator()
- val spanGenerator = idProvider.spanIdentifierGenerator()
+ val traceGenerator = idProvider.traceIdGenerator()
+ val spanGenerator = idProvider.spanIdGenerator()
validateGenerator("TraceID Generator", traceGenerator)
validateGenerator("SpanID Generator", spanGenerator)
diff --git a/kamon-core/src/test/scala/kamon/trace/DoubleLengthTraceIdentityGeneratorSpec.scala b/kamon-core/src/test/scala/kamon/trace/DoubleLengthTraceIdentityGeneratorSpec.scala
index 54e590ad..b22f17e1 100644
--- a/kamon-core/src/test/scala/kamon/trace/DoubleLengthTraceIdentityGeneratorSpec.scala
+++ b/kamon-core/src/test/scala/kamon/trace/DoubleLengthTraceIdentityGeneratorSpec.scala
@@ -6,8 +6,8 @@ import org.scalatest.{Matchers, OptionValues, WordSpecLike}
class DoubleLengthTraceIdentityGeneratorSpec extends WordSpecLike with Matchers with OptionValues {
val idProvider = IdentityProvider.DoubleSizeTraceID()
- val traceGenerator = idProvider.traceIdentifierGenerator()
- val spanGenerator = idProvider.spanIdentifierGenerator()
+ val traceGenerator = idProvider.traceIdGenerator()
+ val spanGenerator = idProvider.spanIdGenerator()
"The DoubleSizeTraceID identity provider" when {
"generating trace identifiers" should {
diff --git a/kamon-core/src/test/scala/kamon/trace/RealSpanSpec.scala b/kamon-core/src/test/scala/kamon/trace/LocalSpanSpec.scala
index e019e15a..e24f8727 100644
--- a/kamon-core/src/test/scala/kamon/trace/RealSpanSpec.scala
+++ b/kamon-core/src/test/scala/kamon/trace/LocalSpanSpec.scala
@@ -8,7 +8,7 @@ import org.scalatest.concurrent.Eventually
import org.scalatest.{BeforeAndAfterAll, Matchers, OptionValues, WordSpec}
import org.scalatest.time.SpanSugar._
-class RealSpanSpec extends WordSpec with Matchers with BeforeAndAfterAll with Eventually with OptionValues
+class LocalSpanSpec extends WordSpec with Matchers with BeforeAndAfterAll with Eventually with OptionValues
with Reconfigure with MetricInspection {
"a real span" when {
@@ -37,7 +37,6 @@ class RealSpanSpec extends WordSpec with Matchers with BeforeAndAfterAll with Ev
.withSpanTag("builder-number-tag", 42)
.withStartTimestamp(100)
.start()
- .addBaggage("baggage", "value")
.addSpanTag("span-string-tag", "value")
.addSpanTag("span-boolean-tag-true", true)
.addSpanTag("span-boolean-tag-false", false)
@@ -81,87 +80,8 @@ class RealSpanSpec extends WordSpec with Matchers with BeforeAndAfterAll with Ev
val customAnnotationTwo = annotations("custom-annotation-2").head
customAnnotationTwo.timestampMicros shouldBe (4201)
customAnnotationTwo.fields shouldBe (Map("custom" -> "yes-2"))
-
- finishedSpan.context.baggage.getAll() should contain(
- "baggage" -> "value"
- )
- }
- }
-
- "pass all the tags, annotations and baggage to the FinishedSpan instance when started, activated and finished" in {
- val scope = Kamon.activate(Kamon.buildSpan("full-span")
- .withSpanTag("builder-string-tag", "value")
- .withSpanTag("builder-boolean-tag-true", true)
- .withSpanTag("builder-boolean-tag-false", false)
- .withSpanTag("builder-number-tag", 42)
- .withStartTimestamp(100)
- .start())
-
- Kamon.activeSpan()
- .addBaggage("baggage", "value")
- .addSpanTag("span-string-tag", "value")
- .addSpanTag("span-boolean-tag-true", true)
- .addSpanTag("span-boolean-tag-false", false)
- .addSpanTag("span-number-tag", 42)
- .annotate("simple-annotation")
- .annotate("regular-annotation", Map("data" -> "something"))
- .annotate(4200, "custom-annotation-1", Map("custom" -> "yes-1"))
- .annotate(Annotation(4201, "custom-annotation-2", Map("custom" -> "yes-2")))
- .setOperationName("fully-populated-active-span")
- .finish(200)
-
- scope.close()
-
- eventually(timeout(2 seconds)) {
- val finishedSpan = reporter.nextSpan().value
- finishedSpan.operationName shouldBe ("fully-populated-active-span")
- finishedSpan.startTimestampMicros shouldBe 100
- finishedSpan.endTimestampMicros shouldBe 200
- finishedSpan.tags should contain allOf(
- "builder-string-tag" -> TagValue.String("value"),
- "builder-boolean-tag-true" -> TagValue.True,
- "builder-boolean-tag-false" -> TagValue.False,
- "builder-number-tag" -> TagValue.Number(42),
- "span-string-tag" -> TagValue.String("value"),
- "span-boolean-tag-true" -> TagValue.True,
- "span-boolean-tag-false" -> TagValue.False,
- "span-number-tag" -> TagValue.Number(42)
- )
-
- finishedSpan.annotations.length shouldBe (4)
- val annotations = finishedSpan.annotations.groupBy(_.name)
- annotations.keys should contain allOf(
- "simple-annotation",
- "regular-annotation",
- "custom-annotation-1",
- "custom-annotation-2"
- )
-
- val customAnnotationOne = annotations("custom-annotation-1").head
- customAnnotationOne.timestampMicros shouldBe (4200)
- customAnnotationOne.fields shouldBe (Map("custom" -> "yes-1"))
-
- val customAnnotationTwo = annotations("custom-annotation-2").head
- customAnnotationTwo.timestampMicros shouldBe (4201)
- customAnnotationTwo.fields shouldBe (Map("custom" -> "yes-2"))
-
- finishedSpan.context.baggage.getAll() should contain(
- "baggage" -> "value"
- )
}
}
-
- "allow storing and retrieving baggage items" in {
- val span = Kamon.buildSpan("span-with-baggage").start()
- span.addBaggage("my-baggage", "value-1")
- span.addBaggage("my-baggage", "value-2")
- span.addBaggage("my-other-baggage", "value-3")
-
- span.context().baggage.getAll() should contain only(
- "my-baggage" -> "value-2",
- "my-other-baggage" -> "value-3"
- )
- }
}
}
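The renamed LocalSpanSpec drops every baggage assertion; what remains on the builder/Span surface are tags and annotations, which end up on the reported FinishedSpan. A compact sketch using only calls exercised in the specs in this diff (the tag and annotation values are illustrative):

  import kamon.Kamon
  import kamon.trace.Span.Annotation
  import kamon.util.Clock

  val span = Kamon.buildSpan("full-span")
    .withSpanTag("builder-string-tag", "value")
    .withStartTimestamp(100)
    .start()
    .addSpanTag("span-number-tag", 42)
    .addMetricTag("component", "example")
    .annotate("simple-annotation")
    .annotate(Annotation(Clock.microTimestamp(), "event", Map("k" -> "v")))

  span.finish(200)   // the FinishedSpan carries the tags and annotations above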
diff --git a/kamon-core/src/test/scala/kamon/trace/TracerSpec.scala b/kamon-core/src/test/scala/kamon/trace/TracerSpec.scala
index 5abfe723..fb5bb313 100644
--- a/kamon-core/src/test/scala/kamon/trace/TracerSpec.scala
+++ b/kamon-core/src/test/scala/kamon/trace/TracerSpec.scala
@@ -2,10 +2,9 @@ package kamon.trace
import com.typesafe.config.ConfigFactory
import kamon.Kamon
+import kamon.context.Context
import kamon.testkit.{SpanBuilding, SpanInspector}
import kamon.trace.Span.TagValue
-import kamon.trace.SpanContext.Source
-import kamon.trace.SpanContextCodec.Format
import org.scalatest.{Matchers, OptionValues, WordSpec}
class TracerSpec extends WordSpec with Matchers with SpanBuilding with OptionValues {
@@ -42,33 +41,16 @@ class TracerSpec extends WordSpec with Matchers with SpanBuilding with OptionVal
("boolean" -> TagValue.True))
}
-// "do not interfere with the currently active Span if not requested when starting a Span" in {
-// val previouslyActiveSpan = tracer.activeSpan()
-// tracer.buildSpan("myOperation").start()
-// tracer.activeSpan() should be theSameInstanceAs(previouslyActiveSpan)
-// }
-//
-// "make a span active with started with the .startActive() function and restore the previous Span when deactivated" in {
-// val previouslyActiveSpan = tracer.activeSpan()
-// val activeSpan = tracer.buildSpan("myOperation").startActive()
-//
-// tracer.activeSpan() shouldNot be theSameInstanceAs(previouslyActiveSpan)
-// val activeSpanData = inspect(activeSpan)
-// activeSpanData.operationName() shouldBe "myOperation"
-//
-// activeSpan.deactivate()
-// tracer.activeSpan() should be theSameInstanceAs(previouslyActiveSpan)
-// }
-
"not have any parent Span if there is ActiveSpan and no parent was explicitly given" in {
val span = tracer.buildSpan("myOperation").start()
val spanData = inspect(span)
spanData.context().parentID shouldBe IdentityProvider.NoIdentifier
}
- "use the currently active span as parent" in {
+
+ "automatically take the Span from the current Context as parent" in {
val parent = tracer.buildSpan("myOperation").start()
- val child = Kamon.withActiveSpan(parent) {
+ val child = Kamon.withContext(Context.create(Span.ContextKey, parent)) {
tracer.buildSpan("childOperation").asChildOf(parent).start()
}
@@ -79,7 +61,7 @@ class TracerSpec extends WordSpec with Matchers with SpanBuilding with OptionVal
"ignore the currently active span as parent if explicitly requested" in {
val parent = tracer.buildSpan("myOperation").start()
- val child = Kamon.withActiveSpan(parent) {
+ val child = Kamon.withContext(Context.create(Span.ContextKey, parent)) {
tracer.buildSpan("childOperation").ignoreActiveSpan().start()
}
@@ -93,53 +75,6 @@ class TracerSpec extends WordSpec with Matchers with SpanBuilding with OptionVal
spanData.startTimestamp() shouldBe 100
}
- "inject and extract a SpanContext from a TextMap carrier" in {
- val spanContext = createSpanContext()
- val injected = Kamon.inject(spanContext, Format.TextMap)
- val extractedSpanContext = Kamon.extract(Format.TextMap, injected).value
-
- spanContext.traceID shouldBe(extractedSpanContext.traceID)
- spanContext.spanID shouldBe(extractedSpanContext.spanID)
- spanContext.parentID shouldBe(extractedSpanContext.parentID)
- spanContext.baggage.getAll() shouldBe(extractedSpanContext.baggage.getAll())
- }
-
- "inject and extract a SpanContext from a TextMap carrier supplied by the caller" in {
- val spanContext = createSpanContext()
- val carrier = TextMap.Default()
- Kamon.inject(spanContext, Format.TextMap, carrier)
- val extractedSpanContext = Kamon.extract(Format.TextMap, carrier).value
-
- spanContext.traceID shouldBe(extractedSpanContext.traceID)
- spanContext.spanID shouldBe(extractedSpanContext.spanID)
- spanContext.parentID shouldBe(extractedSpanContext.parentID)
- spanContext.baggage.getAll() shouldBe(extractedSpanContext.baggage.getAll())
- }
-
- "inject and extract a SpanContext from a HttpHeaders carrier" in {
- val spanContext = createSpanContext()
- val injected = Kamon.inject(spanContext, Format.HttpHeaders)
- val extractedSpanContext = Kamon.extract(Format.HttpHeaders, injected).value
-
- spanContext.traceID shouldBe(extractedSpanContext.traceID)
- spanContext.spanID shouldBe(extractedSpanContext.spanID)
- spanContext.parentID shouldBe(extractedSpanContext.parentID)
- spanContext.baggage.getAll() shouldBe(extractedSpanContext.baggage.getAll())
- }
-
- "inject and extract a SpanContext from a HttpHeaders using a TextMap provided by the caller" in {
- val spanContext = createSpanContext()
- val carrier = TextMap.Default()
- Kamon.inject(spanContext, Format.HttpHeaders, carrier)
- val extractedSpanContext = Kamon.extract(Format.HttpHeaders, carrier).value
-
- spanContext.traceID shouldBe(extractedSpanContext.traceID)
- spanContext.spanID shouldBe(extractedSpanContext.spanID)
- spanContext.parentID shouldBe(extractedSpanContext.parentID)
- spanContext.baggage.getAll() shouldBe(extractedSpanContext.baggage.getAll())
- }
-
-
"preserve the same Span and Parent identifier when creating a Span with a remote parent if join-remote-parents-with-same-span-id is enabled" in {
val previousConfig = Kamon.config()
@@ -148,12 +83,12 @@ class TracerSpec extends WordSpec with Matchers with SpanBuilding with OptionVal
.withFallback(Kamon.config())
}
- val remoteParent = createSpanContext().copy(source = Source.Remote)
+ val remoteParent = Span.Remote(createSpanContext())
val childData = inspect(tracer.buildSpan("local").asChildOf(remoteParent).start())
- childData.context().traceID shouldBe remoteParent.traceID
- childData.context().parentID shouldBe remoteParent.parentID
- childData.context().spanID shouldBe remoteParent.spanID
+ childData.context().traceID shouldBe remoteParent.context.traceID
+ childData.context().parentID shouldBe remoteParent.context.parentID
+ childData.context().spanID shouldBe remoteParent.context.spanID
Kamon.reconfigure(previousConfig)
}
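Remote parents are now modelled by wrapping the decoded SpanContext in Span.Remote (the old Source.Remote flag on SpanContext is gone); whether the child reuses the remote parent's span id is still controlled by the join-remote-parents-with-same-span-id setting exercised above. A sketch based on that test, building the parent SpanContext explicitly with the four fields it now carries:

  import kamon.Kamon
  import kamon.trace.{IdentityProvider, Span, SpanContext}
  import kamon.trace.SpanContext.SamplingDecision

  val ids = IdentityProvider.Default()

  // e.g. a SpanContext decoded from incoming X-B3-* headers
  val remoteParent = Span.Remote(SpanContext(
    traceID = ids.traceIdGenerator().generate(),
    spanID = ids.spanIdGenerator().generate(),
    parentID = IdentityProvider.NoIdentifier,
    samplingDecision = SamplingDecision.Sample))

  val serverSpan = Kamon.buildSpan("local")
    .asChildOf(remoteParent)
    .start()

  // With join-remote-parents-with-same-span-id enabled, serverSpan keeps the
  // remote trace, span and parent identifiers; otherwise createChild assigns
  // it a freshly generated span id.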