aboutsummaryrefslogtreecommitdiff
path: root/kamon-influxdb/src/main/scala/kamon/influxdb/TagsGenerator.scala
blob: f478e61abdd816fb32f6b90a3c47db97f396fa5d (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
/*
 * =========================================================================================
 * Copyright © 2013-2014 the kamon project <http://kamon.io/>
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the
 * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 * =========================================================================================
 */

package kamon.influxdb

import java.lang.management.ManagementFactory

import com.typesafe.config.Config
import kamon.metric.instrument.Histogram
import kamon.metric.{ Entity, MetricKey }
import collection.JavaConversions._

/**
 * Mixin that derives the InfluxDB tag set and histogram field values for a
 * metric snapshot. Implementors supply the plugin `config`.
 */
trait TagsGenerator {
  protected val config: Config

  // Logical application name read from the plugin configuration.
  protected val application = config.getString("application-name")

  /**
   * Hostname used for the "hostname" tag: the configured override, or — when
   * the override is the sentinel "none" — the host part of the JVM's runtime
   * name, which has the form "pid@host".
   */
  val hostname = {
    val hostnameOverride = config.getString("hostname-override")

    if (hostnameOverride.equals("none")) {
      // NOTE(review): assumes the RuntimeMXBean name always contains '@';
      // this holds on HotSpot but is not guaranteed by the JDK spec.
      ManagementFactory.getRuntimeMXBean.getName.split('@')(1)
    } else {
      hostnameOverride
    }
  }

  // Percentiles (as doubles, e.g. 99.0 or 99.9) to report per histogram.
  protected val percentiles = config.getDoubleList("percentiles").toList

  /**
   * Builds the tag map for one metric. Trace segments report the owning
   * trace's name (from the entity's "trace" tag) as the category instead of
   * the literal "trace-segment" category.
   */
  protected def generateTags(entity: Entity, metricKey: MetricKey): Map[String, String] = {
    val category = entity.category match {
      // NOTE(review): throws NoSuchElementException if a trace segment lacks
      // the "trace" tag — presumably Kamon always sets it; verify upstream.
      case "trace-segment" => entity.tags("trace")
      case other           => other
    }

    Map(
      "category" -> normalize(category),
      "entity" -> normalize(entity.name),
      "hostname" -> normalize(hostname),
      "metric" -> normalize(metricKey.name))
  }

  /**
   * Extracts the field values for a histogram snapshot: min ("lower"),
   * mean and max ("upper"), plus one entry per configured percentile.
   * Whole-number percentiles are keyed without the fractional part
   * (99.0 -> "p99"), fractional ones keep it (99.9 -> "p99.9").
   */
  protected def histogramValues(hs: Histogram.Snapshot): Map[String, BigDecimal] = {
    val defaults = Map(
      "lower" -> BigDecimal(hs.min),
      "mean" -> average(hs),
      "upper" -> BigDecimal(hs.max))

    percentiles.foldLeft(defaults) { (acc, p) =>
      val fractional = p % 1
      val integral = (p - fractional).toInt

      val percentile = BigDecimal(hs.percentile(p))

      if (fractional > 0.0) acc ++ Map(s"p$p" -> percentile)
      else acc ++ Map(s"p$integral" -> percentile)
    }
  }

  /**
   * Replaces characters that are unsafe in the InfluxDB line protocol
   * (spaces, colons, slashes, backslashes, dots) with dashes.
   */
  protected def normalize(s: String): String =
    s
      .replace(": ", "-")
      .replace(":\\", "-")
      .replace(":", "-")
      .replace(" ", "-")
      .replace("\\", "-")
      .replace("/", "-")
      .replace(".", "-")

  // Mean of the snapshot; guards against division by zero on empty histograms.
  private def average(histogram: Histogram.Snapshot): BigDecimal = {
    if (histogram.numberOfMeasurements == 0) BigDecimal(0.0)
    else BigDecimal(histogram.sum / histogram.numberOfMeasurements.toDouble)
  }

}