
Commit b8dea69

= kamon-datadog: Add p99 metric (#1360)
1 parent d9f7f9f commit b8dea69

File tree: 3 files changed, +110 -46 lines

reporters/kamon-datadog/src/main/resources/reference.conf

Lines changed: 5 additions & 0 deletions
@@ -86,6 +86,11 @@ kamon {
     # The log level in which to log failures to submit metrics.
     failure-log-level = "error"

+    # For histograms, which percentiles to submit.
+    # Each value configured here will correspond to a different custom metric submitted to Datadog.
+    # Currently only applicable to the API reporter.
+    percentiles = [95.0]
+
     # All time values are collected in nanoseconds,
     # to scale before sending to datadog set "time-units" to "s" or "ms" or "µs".
     # Value "n" is equivalent to omitting the setting
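For context, a minimal standalone sketch (not part of this commit; the object name is just for illustration) of how the new percentiles key can be overridden and read back with Typesafe Config. It mirrors the reader added to DatadogAPIReporter.Configuration below and uses the same values as the spec:

import com.typesafe.config.ConfigFactory
import scala.collection.JavaConverters._

object PercentilesConfigSketch extends App {
  // The same override the spec below applies, expressed as a parsed HOCON string.
  val config = ConfigFactory.parseString("kamon.datadog.percentiles = [95.0, 99, 94.5]")

  // Mirrors the reader added to DatadogAPIReporter.Configuration in this commit.
  val percentiles: Set[Double] =
    config.getDoubleList("kamon.datadog.percentiles").asScala.toList.map(_.toDouble).toSet

  println(percentiles) // e.g. Set(95.0, 99.0, 94.5)
}

Because the values end up in a Set[Double], duplicate entries such as 95 and 95.0 collapse into a single percentile.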

reporters/kamon-datadog/src/main/scala/kamon/datadog/DatadogAPIReporter.scala

Lines changed: 18 additions & 9 deletions
@@ -21,18 +21,17 @@ import java.nio.charset.StandardCharsets
 import java.text.{DecimalFormat, DecimalFormatSymbols}
 import java.time.Duration
 import java.util.Locale
-
 import com.typesafe.config.Config
 import kamon.metric.MeasurementUnit.Dimension.{Information, Time}
 import kamon.metric.{MeasurementUnit, MetricSnapshot, PeriodSnapshot}
 import kamon.tag.{Tag, TagSet}
 import kamon.util.{EnvironmentTags, Filter}
-import kamon.{module, Kamon}
+import kamon.{Kamon, module}
 import kamon.datadog.DatadogAPIReporter.Configuration
 import kamon.module.{MetricReporter, ModuleFactory}
 import org.slf4j.LoggerFactory
 import org.slf4j.event.Level
-
+import scala.collection.JavaConverters._
 import scala.util.{Failure, Success}

 class DatadogAPIReporterFactory extends ModuleFactory {
@@ -82,6 +81,12 @@ class DatadogAPIReporter(
     val interval = Math.round(Duration.between(snapshot.from, snapshot.to).toMillis() / 1000d)
     val seriesBuilder = new StringBuilder()

+    @inline
+    def doubleToPercentileString(double: Double) = {
+      if (double == double.toLong) f"${double.toLong}%d"
+      else f"$double%s"
+    }
+
     def addDistribution(metric: MetricSnapshot.Distributions): Unit = {
       val unit = metric.settings.unit
       metric.instruments.foreach { d =>
@@ -91,12 +96,14 @@ class DatadogAPIReporter(
         addMetric(metric.name + ".avg", valueFormat.format(scale(average, unit)), gauge, d.tags)
         addMetric(metric.name + ".count", valueFormat.format(dist.count), count, d.tags)
         addMetric(metric.name + ".median", valueFormat.format(scale(dist.percentile(50d).value, unit)), gauge, d.tags)
-        addMetric(
-          metric.name + ".95percentile",
-          valueFormat.format(scale(dist.percentile(95d).value, unit)),
-          gauge,
-          d.tags
-        )
+        configuration.percentiles.foreach { p =>
+          addMetric(
+            metric.name + s".${doubleToPercentileString(p)}percentile",
+            valueFormat.format(scale(dist.percentile(p).value, unit)),
+            gauge,
+            d.tags
+          )
+        }
         addMetric(metric.name + ".max", valueFormat.format(scale(dist.max, unit)), gauge, d.tags)
         addMetric(metric.name + ".min", valueFormat.format(scale(dist.min, unit)), gauge, d.tags)
       }
@@ -164,6 +171,7 @@ private object DatadogAPIReporter {

   case class Configuration(
     httpConfig: Config,
+    percentiles: Set[Double],
     timeUnit: MeasurementUnit,
     informationUnit: MeasurementUnit,
     extraTags: Seq[(String, String)],
@@ -187,6 +195,7 @@ private object DatadogAPIReporter {

     Configuration(
       datadogConfig.getConfig("api"),
+      percentiles = datadogConfig.getDoubleList("percentiles").asScala.toList.map(_.toDouble).toSet,
       timeUnit = readTimeUnit(datadogConfig.getString("time-unit")),
       informationUnit = readInformationUnit(datadogConfig.getString("information-unit")),
       // Remove the "host" tag since it gets added to the datadog payload separately
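As a quick illustration (a sketch only, not part of the commit; the object name is hypothetical), the @inline doubleToPercentileString helper above drops the fractional part for whole-number percentiles and keeps it otherwise, which determines the metric-name suffix sent to Datadog:

object PercentileNameSketch extends App {
  // Same logic as the helper added in DatadogAPIReporter above.
  def doubleToPercentileString(double: Double): String =
    if (double == double.toLong) f"${double.toLong}%d"
    else f"$double%s"

  // The suffix appended to a distribution's metric name for each configured percentile.
  Seq(95.0, 99.0, 94.5).foreach { p =>
    println("test.timer." + s"${doubleToPercentileString(p)}percentile")
  }
  // Prints test.timer.95percentile, test.timer.99percentile and test.timer.94.5percentile.
}

So the spec's configured list [95.0, 99, 94.5] yields exactly the three percentile series asserted in the tests below.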

reporters/kamon-datadog/src/test/scala/kamon/datadog/DatadogAPIReporterSpec.scala

Lines changed: 87 additions & 37 deletions
@@ -65,6 +65,43 @@ class DatadogAPIReporterSpec extends AbstractHttpReporter with Matchers with Rec
       server.getRequestCount shouldEqual 4
     }

+    val examplePeriodWithDistributions: PeriodSnapshot = {
+      val distributionExample = new Distribution {
+        override def dynamicRange: DynamicRange = ???
+        override def min: Long = 0
+        override def max: Long = 10
+        override def sum: Long = 100
+        override def count: Long = 5
+        override def percentile(rank: Double): Distribution.Percentile = new Percentile {
+          override def rank: Double = 0
+          override def value: Long = 0
+          override def countAtRank: Long = 0
+        }
+        override def percentiles: Seq[Distribution.Percentile] = ???
+        override def percentilesIterator: Iterator[Distribution.Percentile] = ???
+        override def buckets: Seq[Distribution.Bucket] = ???
+        override def bucketsIterator: Iterator[Distribution.Bucket] = ???
+      }
+      PeriodSnapshot.apply(
+        now.minusMillis(1000),
+        now,
+        Nil,
+        Nil,
+        Nil,
+        MetricSnapshot.ofDistributions(
+          "test.timer",
+          "test",
+          Metric.Settings.ForDistributionInstrument(
+            MeasurementUnit.none,
+            java.time.Duration.ZERO,
+            DynamicRange.Default
+          ),
+          Instrument.Snapshot.apply(TagSet.Empty, distributionExample) :: Nil
+        ) :: Nil,
+        Nil
+      )
+    }
+
     "sends metrics - compressed" in {
       val baseUrl = mockResponse("/test", new MockResponse().setStatus("HTTP/1.1 200 OK"))
       applyConfig("kamon.datadog.api.api-url = \"" + baseUrl + "\"")
@@ -103,50 +140,38 @@ class DatadogAPIReporterSpec extends AbstractHttpReporter with Matchers with Rec

     }

-    "send timer metrics" in {
+    "send timer metrics with the p95 percentile by default" in {
       val baseUrl = mockResponse("/test", new MockResponse().setStatus("HTTP/1.1 200 OK"))
       applyConfig("kamon.datadog.api.api-url = \"" + baseUrl + "\"")
       applyConfig("kamon.datadog.api.api-key = \"dummy\"")
       applyConfig("kamon.datadog.api.compression = false")
       reporter.reconfigure(Kamon.config())

-      val distribution = new Distribution {
-        override def dynamicRange: DynamicRange = ???
-        override def min: Long = 0
-        override def max: Long = 10
-        override def sum: Long = 100
-        override def count: Long = 5
-        override def percentile(rank: Double): Distribution.Percentile = new Percentile {
-          override def rank: Double = 0
-          override def value: Long = 0
-          override def countAtRank: Long = 0
-        }
-        override def percentiles: Seq[Distribution.Percentile] = ???
-        override def percentilesIterator: Iterator[Distribution.Percentile] = ???
-        override def buckets: Seq[Distribution.Bucket] = ???
-        override def bucketsIterator: Iterator[Distribution.Bucket] = ???
-      }
-
-      reporter.reportPeriodSnapshot(
-        PeriodSnapshot.apply(
-          now.minusMillis(1000),
-          now,
-          Nil,
-          Nil,
-          Nil,
-          MetricSnapshot.ofDistributions(
-            "test.timer",
-            "test",
-            Metric.Settings.ForDistributionInstrument(
-              MeasurementUnit.none,
-              java.time.Duration.ZERO,
-              DynamicRange.Default
-            ),
-            Instrument.Snapshot.apply(TagSet.Empty, distribution) :: Nil
-          ) :: Nil,
-          Nil
+      reporter.reportPeriodSnapshot(examplePeriodWithDistributions)
+      val request = server.takeRequest()
+      request.getRequestUrl.toString shouldEqual baseUrl + "?api_key=dummy"
+      request.getMethod shouldEqual "POST"
+      Json.parse(request.getBody.readUtf8()) shouldEqual Json
+        .parse(
+          """{"series":[
+            |{"metric":"test.timer.avg","interval":1,"points":[[1523394,20]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
+            |{"metric":"test.timer.count","interval":1,"points":[[1523394,5]],"type":"count","host":"test","tags":["env:staging","service:kamon-application"]},
+            |{"metric":"test.timer.median","interval":1,"points":[[1523394,0]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
+            |{"metric":"test.timer.95percentile","interval":1,"points":[[1523394,0]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
+            |{"metric":"test.timer.max","interval":1,"points":[[1523394,10]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
+            |{"metric":"test.timer.min","interval":1,"points":[[1523394,0]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]}]}""".stripMargin
         )
-      )
+    }
+
+    "send timer metrics allowing configuration of percentiles to submit" in {
+      val baseUrl = mockResponse("/test", new MockResponse().setStatus("HTTP/1.1 200 OK"))
+      applyConfig("kamon.datadog.api.api-url = \"" + baseUrl + "\"")
+      applyConfig("kamon.datadog.api.api-key = \"dummy\"")
+      applyConfig("kamon.datadog.api.compression = false")
+      applyConfig("kamon.datadog.percentiles = [95.0, 99, 94.5]")
+      reporter.reconfigure(Kamon.config())
+
+      reporter.reportPeriodSnapshot(examplePeriodWithDistributions)
       val request = server.takeRequest()
       request.getRequestUrl.toString shouldEqual baseUrl + "?api_key=dummy"
       request.getMethod shouldEqual "POST"
@@ -157,6 +182,31 @@ class DatadogAPIReporterSpec extends AbstractHttpReporter with Matchers with Rec
             |{"metric":"test.timer.count","interval":1,"points":[[1523394,5]],"type":"count","host":"test","tags":["env:staging","service:kamon-application"]},
             |{"metric":"test.timer.median","interval":1,"points":[[1523394,0]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
             |{"metric":"test.timer.95percentile","interval":1,"points":[[1523394,0]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
+            |{"metric":"test.timer.99percentile","interval":1,"points":[[1523394,0]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
+            |{"metric":"test.timer.94.5percentile","interval":1,"points":[[1523394,0]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
+            |{"metric":"test.timer.max","interval":1,"points":[[1523394,10]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
+            |{"metric":"test.timer.min","interval":1,"points":[[1523394,0]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]}]}""".stripMargin
+        )
+    }
+
+    "send timer metrics without percentiles" in {
+      val baseUrl = mockResponse("/test", new MockResponse().setStatus("HTTP/1.1 200 OK"))
+      applyConfig("kamon.datadog.api.api-url = \"" + baseUrl + "\"")
+      applyConfig("kamon.datadog.api.api-key = \"dummy\"")
+      applyConfig("kamon.datadog.api.compression = false")
+      applyConfig("kamon.datadog.percentiles = []")
+      reporter.reconfigure(Kamon.config())
+
+      reporter.reportPeriodSnapshot(examplePeriodWithDistributions)
+      val request = server.takeRequest()
+      request.getRequestUrl.toString shouldEqual baseUrl + "?api_key=dummy"
+      request.getMethod shouldEqual "POST"
+      Json.parse(request.getBody.readUtf8()) shouldEqual Json
+        .parse(
+          """{"series":[
+            |{"metric":"test.timer.avg","interval":1,"points":[[1523394,20]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
+            |{"metric":"test.timer.count","interval":1,"points":[[1523394,5]],"type":"count","host":"test","tags":["env:staging","service:kamon-application"]},
+            |{"metric":"test.timer.median","interval":1,"points":[[1523394,0]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
             |{"metric":"test.timer.max","interval":1,"points":[[1523394,10]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
             |{"metric":"test.timer.min","interval":1,"points":[[1523394,0]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]}]}""".stripMargin
         )
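To summarise what the three specs above assert, here is a small standalone sketch (illustration only; suffix and the object name are hypothetical helpers borrowing the reporter's formatting rule) mapping each configured percentile list to the extra series expected alongside avg, count, median, max and min:

object ExpectedSeriesSketch extends App {
  // Formatting rule borrowed from the reporter: whole numbers lose the ".0", fractions keep it.
  def suffix(p: Double): String =
    (if (p == p.toLong) p.toLong.toString else p.toString) + "percentile"

  // The three configurations exercised by the specs above.
  val cases = Seq(
    "default ([95.0])" -> Set(95.0),
    "[95.0, 99, 94.5]" -> Set(95.0, 99.0, 94.5),
    "[] (disabled)"    -> Set.empty[Double]
  )

  cases.foreach { case (label, percentiles) =>
    val extraSeries = percentiles.toSeq.sorted.map(p => s"test.timer.${suffix(p)}")
    println(s"$label -> ${if (extraSeries.isEmpty) "no percentile series" else extraSeries.mkString(", ")}")
  }
}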
