path: root/core/src/main/scala/spark/metrics/sink/CsvSink.scala
package spark.metrics.sink

import com.codahale.metrics.{CsvReporter, MetricRegistry}

import java.io.File
import java.util.{Locale, Properties}
import java.util.concurrent.TimeUnit

import spark.metrics.MetricsSystem

/**
 * A metrics sink that periodically dumps the metric registry to CSV files
 * using Codahale's CsvReporter.
 */
class CsvSink(val property: Properties, val registry: MetricRegistry) extends Sink {
  // Poll period, time unit, and output directory, falling back to the
  // defaults defined in the companion object when not configured.
  val pollPeriod = Option(property.getProperty(CsvSink.CSV_KEY_PERIOD)) match {
    case Some(s) => s.toInt
    case None => CsvSink.CSV_DEFAULT_PERIOD.toInt
  }

  val pollUnit = Option(property.getProperty(CsvSink.CSV_KEY_UNIT)) match {
    case Some(s) => MetricsSystem.timeUnits(s)
    case None => MetricsSystem.timeUnits(CsvSink.CSV_DEFAULT_UNIT)
  }

  val pollDir = Option(property.getProperty(CsvSink.CSV_KEY_DIR)) match {
    case Some(s) => s
    case None => CsvSink.CSV_DEFAULT_DIR
  }
  // The reporter writes one CSV file per metric into pollDir, reporting
  // durations in milliseconds and rates in events per second.
  val reporter: CsvReporter = CsvReporter.forRegistry(registry)
      .formatFor(Locale.US)
      .convertDurationsTo(TimeUnit.MILLISECONDS)
      .convertRatesTo(TimeUnit.SECONDS)
      .build(new File(pollDir))

  override def start() {
    reporter.start(pollPeriod, pollUnit)
  }

  override def stop() {
    reporter.stop()
  }
}

object CsvSink {
  // Property keys read from the sink's configuration.
  val CSV_KEY_PERIOD = "period"
  val CSV_KEY_UNIT = "unit"
  val CSV_KEY_DIR = "directory"

  // Defaults used when no value is configured.
  val CSV_DEFAULT_PERIOD = "10"
  val CSV_DEFAULT_UNIT = "second"
  val CSV_DEFAULT_DIR = "/tmp/"
}
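
For illustration, here is a minimal standalone sketch of how this sink could be exercised. In Spark the sink is normally instantiated by spark.metrics.MetricsSystem from a metrics configuration file, so the direct construction below, along with the object name CsvSinkExample, the output directory /tmp/spark-metrics-example, and the example.requests counter, are illustrative assumptions rather than part of the source.

import java.io.File
import java.util.Properties

import com.codahale.metrics.MetricRegistry

import spark.metrics.sink.CsvSink

object CsvSinkExample {
  def main(args: Array[String]) {
    // Hypothetical output directory; CsvReporter writes its files here,
    // so make sure it exists before starting the sink.
    val outDir = "/tmp/spark-metrics-example"
    new File(outDir).mkdirs()

    // Keys correspond to CsvSink.CSV_KEY_PERIOD / CSV_KEY_UNIT / CSV_KEY_DIR.
    val props = new Properties()
    props.setProperty("period", "5")
    props.setProperty("unit", "second")
    props.setProperty("directory", outDir)

    // Register a sample metric so the CSV output has something to report.
    val registry = new MetricRegistry()
    registry.counter("example.requests").inc()

    val sink = new CsvSink(props, registry)
    sink.start()          // begins writing one CSV file per metric every 5 seconds
    Thread.sleep(12000)   // let a couple of polling intervals elapse
    sink.stop()
  }
}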