/* =========================================================================================
 * Copyright © 2013-2018 the kamon project <http://kamon.io/>
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the
 * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 * =========================================================================================
 */

package kamon
package context

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, InputStream, OutputStream}

import com.typesafe.config.Config
import kamon.context.BinaryPropagation.{ByteStreamReader, ByteStreamWriter}
import kamon.context.generated.binary.context.{Context => ColferContext, Entry => ColferEntry}
import org.slf4j.LoggerFactory

import scala.reflect.ClassTag
import scala.util.control.NonFatal
import scala.util.{Failure, Success, Try}


/**
  * Context propagation that uses byte stream abstractions as the transport medium. The Binary propagation uses
  * instances of [[ByteStreamReader]] and [[ByteStreamWriter]] to decode and encode Context instances, respectively.
  *
  * Binary propagation uses the [[ByteStreamReader]] and [[ByteStreamWriter]] abstractions, which closely model the
  * APIs of [[InputStream]] and [[OutputStream]] but do not expose functionality that lacks a well-defined behavior
  * for Context propagation, e.g. the flush or close operations on OutputStreams.
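  *
  * A minimal round-trip sketch, assuming a `config` object holding this channel's settings and a `classLoading`
  * instance used to resolve entry reader/writer classes (both are placeholders, not defined in this file):
  *
  * {{{
  * val propagation = BinaryPropagation.from(config, classLoading)
  * val out = new java.io.ByteArrayOutputStream()
  * propagation.write(Context.of(Map("example-tag" -> "value")), ByteStreamWriter.of(out))
  * val restored = propagation.read(ByteStreamReader.of(out.toByteArray))
  * }}}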
  */
object BinaryPropagation {

  /**
    * Represents a readable stream of bytes. This interface closely resembles [[InputStream]], minus the functionality
    * that wouldn't have a clearly defined behavior in the context of Context propagation.
    */
  trait ByteStreamReader {
    /**
      * Number of available bytes on the ByteStream.
      */
    def available(): Int

    /**
      * Reads as many bytes as possible into the target byte array.
      *
      * @param target Target buffer in which the read bytes will be written.
      * @return The number of bytes written into the target buffer.
      */
    def read(target: Array[Byte]): Int

    /**
      * Reads a specified number of bytes into the target buffer, starting from the offset position.
      *
      * @param target Target buffer in which read bytes will be written.
      * @param offset Offset at which to start writing bytes in the target buffer.
      * @param count Number of bytes to be read.
      * @return The number of bytes written into the target buffer.
      */
    def read(target: Array[Byte], offset: Int, count: Int): Int

    /**
      * Reads all available bytes into a newly created byte array.
      *
      * @return All bytes read.
      */
    def readAll(): Array[Byte]
  }


  object ByteStreamReader {

    /**
      * Creates a new [[ByteStreamReader]] that reads data from a byte array.
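      *
      * For example, a small sketch that reads an arbitrary payload back out:
      *
      * {{{
      * val payload: Array[Byte] = ByteStreamReader.of("some bytes".getBytes("UTF-8")).readAll()
      * }}}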
      */
    def of(bytes: Array[Byte]): ByteStreamReader = new ByteArrayInputStream(bytes) with ByteStreamReader {
      override def readAll(): Array[Byte] = {
        val target = Array.ofDim[Byte](available())
        read(target, 0, available())
        target
      }
    }
  }


  /**
    * Represents a writable stream of bytes. This interface closely resembles [[OutputStream]], minus the functionality
    * that wouldn't have a clearly defined behavior in the context of Context propagation.
    */
  trait ByteStreamWriter {

    /**
      * Writes all bytes into the stream.
      */
    def write(bytes: Array[Byte]): Unit

    /**
      * Writes a portion of the provided bytes into the stream.
      *
      * @param bytes Buffer from which data will be selected.
      * @param offset Starting index on the buffer.
      * @param count Number of bytes to write into the stream.
      */
    def write(bytes: Array[Byte], offset: Int, count: Int): Unit

    /**
      * Write a single byte into the stream.
      */
    def write(byte: Int): Unit

  }

  object ByteStreamWriter {

    /**
      * Creates a new [[ByteStreamWriter]] from an OutputStream.
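      *
      * For example, a small sketch that collects the written bytes in memory:
      *
      * {{{
      * val out = new java.io.ByteArrayOutputStream()
      * ByteStreamWriter.of(out).write(Array[Byte](1, 2, 3))
      * }}}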
      */
    def of(outputStream: OutputStream): ByteStreamWriter = new ByteStreamWriter {
      override def write(bytes: Array[Byte]): Unit =
        outputStream.write(bytes)

      override def write(bytes: Array[Byte], offset: Int, count: Int): Unit =
        outputStream.write(bytes, offset, count)

      override def write(byte: Int): Unit =
        outputStream.write(byte)
    }
  }



  /**
    * Creates a new default Binary Propagation instance from the provided configuration.
    *
    * @param config Binary Propagation channel configuration
    * @return A newly constructed BinaryPropagation instance.
    */
  def from(config: Config, classLoading: ClassLoading): Propagation[ByteStreamReader, ByteStreamWriter] = {
    new BinaryPropagation.Default(Settings.from(config, classLoading))
  }

  /**
    * Default Binary propagation in Kamon. This implementation uses Colfer to read and write the context tags and
    * entries. Entries are represented as simple pairs of entry name and bytes, which are then processed by the
    * configured entry readers and writers.
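    *
    * A hedged sketch of a custom entry codec that could be configured for this propagation. The `read`/`write`
    * signatures are inferred from how entries are invoked in this file; the payload handling is a placeholder:
    *
    * {{{
    * class StringEntryCodec extends Propagation.EntryReader[ByteStreamReader]
    *     with Propagation.EntryWriter[ByteStreamWriter] {
    *
    *   override def read(medium: ByteStreamReader, context: Context): Context = {
    *     val decoded = new String(medium.readAll(), "UTF-8")
    *     context // enrich the returned Context with `decoded` under this entry's key
    *   }
    *
    *   override def write(context: Context, medium: ByteStreamWriter): Unit =
    *     medium.write("serialized entry".getBytes("UTF-8"))
    * }
    * }}}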
    */
  class Default(settings: Settings) extends Propagation[ByteStreamReader, ByteStreamWriter] {
    private val _log = LoggerFactory.getLogger(classOf[BinaryPropagation.Default])
    private val _streamPool = new ThreadLocal[Default.ReusableByteStreamWriter] {
      override def initialValue(): Default.ReusableByteStreamWriter = new Default.ReusableByteStreamWriter(128)
    }

    private val _contextBufferPool = new ThreadLocal[Array[Byte]] {
      override def initialValue(): Array[Byte] = Array.ofDim[Byte](settings.maxOutgoingSize)
    }

    override def read(reader: ByteStreamReader): Context = {
      if(reader.available() > 0) {
        val contextData = Try {
          val cContext = new ColferContext()
          cContext.unmarshal(reader.readAll(), 0)
          cContext
        }

        contextData.failed.foreach {
          case NonFatal(t) => _log.warn("Failed to read Context from ByteStreamReader", t)
        }

        contextData.map { colferContext =>

          // Context tags
          var tagSectionsCount = colferContext.tags.length
          if (tagSectionsCount > 0 && tagSectionsCount % 2 != 0) {
            _log.warn("Malformed Context tags found, tags consistency might be compromised")
            tagSectionsCount -= 1
          }

          val tags = if (tagSectionsCount > 0) {
            val tagsBuilder = Map.newBuilder[String, String]
            var tagIndex = 0
            while (tagIndex < tagSectionsCount) {
              tagsBuilder += (colferContext.tags(tagIndex) -> colferContext.tags(tagIndex + 1))
              tagIndex += 2
            }
            tagsBuilder.result()

          } else Map.empty[String, String]


          // Only reads the entries for which there is a registered reader
          colferContext.entries.foldLeft(Context.of(tags)) {
            case (context, entryData) =>
              settings.incomingEntries.get(entryData.name).map { entryReader =>
                var contextWithEntry = context
                try {
                  contextWithEntry = entryReader.read(ByteStreamReader.of(entryData.content), context)
                } catch {
                  case NonFatal(t) => _log.warn("Failed to read entry [{}]", entryData.name.asInstanceOf[Any], t)
                }

                contextWithEntry
              }.getOrElse(context)
          }
        } getOrElse(Context.Empty)
      } else Context.Empty
    }

    override def write(context: Context, writer: ByteStreamWriter): Unit = {
      if (context.nonEmpty()) {
        val contextData = new ColferContext()
        val output = _streamPool.get()
        val contextOutgoingBuffer = _contextBufferPool.get()

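        // Tags are flattened into a single array of alternating key/value pairs, mirroring the layout decoded in read().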
        if (context.tags.nonEmpty) {
          val tags = Array.ofDim[String](context.tags.size * 2)
          var tagIndex = 0
          context.tags.foreach {
            case (key, value) =>
              tags.update(tagIndex, key)
              tags.update(tagIndex + 1, value)
              tagIndex += 2
          }

          contextData.tags = tags
        }

        if (context.entries.nonEmpty) {
          val entries = settings.outgoingEntries.collect {
            case (entryName, entryWriter) if context.entries.contains(entryName) =>
              val colferEntry = new ColferEntry()
              try {
                output.reset()
                entryWriter.write(context, output)

                colferEntry.name = entryName
                colferEntry.content = output.toByteArray()
              } catch {
                case NonFatal(t) => _log.warn("Failed to write entry [{}]", entryName.asInstanceOf[Any], t)
              }

              colferEntry
          }

          contextData.entries = entries.toArray
        }

        try {
          val contextSize = contextData.marshal(contextOutgoingBuffer, 0)
          writer.write(contextOutgoingBuffer, 0, contextSize)
        } catch {
          case NonFatal(t) => _log.warn("Failed to write Context to ByteStreamWriter", t)
        }
      }
    }
  }

  object Default {
    private class ReusableByteStreamWriter(size: Int) extends ByteArrayOutputStream(size) with ByteStreamWriter {
      def underlying(): Array[Byte] = this.buf
    }
  }

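  /**
    * Settings for Binary Propagation. When built from configuration (see [[Settings.from]]) the channel config is
    * expected to roughly follow this shape; the entry name and implementation class below are illustrative
    * placeholders only:
    *
    * {{{
    * max-outgoing-size = 2 kilobytes
    * entries.incoming.example = "com.example.StringEntryCodec"
    * entries.outgoing.example = "com.example.StringEntryCodec"
    * }}}
    */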
  case class Settings(
    maxOutgoingSize: Int,
    incomingEntries: Map[String, Propagation.EntryReader[ByteStreamReader]],
    outgoingEntries: Map[String, Propagation.EntryWriter[ByteStreamWriter]]
  )

  object Settings {
    private val log = LoggerFactory.getLogger(classOf[BinaryPropagation.Settings])

    def from(config: Config, classLoading: ClassLoading): BinaryPropagation.Settings = {
      def buildInstances[ExpectedType : ClassTag](mappings: Map[String, String]): Map[String, ExpectedType] = {
        val instanceMap = Map.newBuilder[String, ExpectedType]

        mappings.foreach {
          case (contextKey, componentClass) => classLoading.createInstance[ExpectedType](componentClass, Nil) match {
            case Success(componentInstance) => instanceMap += (contextKey -> componentInstance)
            case Failure(exception) => log.warn("Failed to instantiate {} [{}]",
              implicitly[ClassTag[ExpectedType]].runtimeClass.getName, componentClass, exception)
          }
        }

        instanceMap.result()
      }

      Settings(
        config.getBytes("max-outgoing-size").toInt,
        buildInstances[Propagation.EntryReader[ByteStreamReader]](config.getConfig("entries.incoming").pairs),
        buildInstances[Propagation.EntryWriter[ByteStreamWriter]](config.getConfig("entries.outgoing").pairs)
      )
    }
  }
}