Skip to content

Commit 866143b

Browse files
authored
receiver/prometheus: add metricGroup.toSummaryPoint pdata conversion (#3668)
Implements metricGroupPdata.toSummaryPoint and adds unit tests as well as equivalence tests to ensure the migration will render the same results. Updates #3137 Depends on PR #3667 Updates PR #3427
1 parent f167ed8 commit 866143b

File tree

2 files changed

+233
-1
lines changed

2 files changed

+233
-1
lines changed

receiver/prometheusreceiver/internal/otlp_metricfamily.go

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -139,6 +139,39 @@ func (mg *metricGroupPdata) toDistributionPoint(orderedLabelKeys []string, dest
139139
return true
140140
}
141141

142+
func (mg *metricGroupPdata) toSummaryPoint(orderedLabelKeys []string, dest *pdata.SummaryDataPointSlice) bool {
143+
// expecting count to be provided, however, in the following two cases, they can be missed.
144+
// 1. data is corrupted
145+
// 2. ignored by startValue evaluation
146+
if !mg.hasCount {
147+
return false
148+
}
149+
150+
mg.sortPoints()
151+
152+
point := dest.AppendEmpty()
153+
quantileValues := point.QuantileValues()
154+
for _, p := range mg.complexValue {
155+
quantile := quantileValues.AppendEmpty()
156+
quantile.SetValue(p.value)
157+
quantile.SetQuantile(p.boundary * 100)
158+
}
159+
160+
// Based on the summary description from https://prometheus.io/docs/concepts/metric_types/#summary
161+
// the quantiles are calculated over a sliding time window, however, the count is the total count of
162+
// observations and the corresponding sum is a sum of all observed values, thus the sum and count used
163+
// at the global level of the metricspb.SummaryValue
164+
// The timestamp MUST be in retrieved from milliseconds and converted to nanoseconds.
165+
tsNanos := pdata.Timestamp(mg.ts * 1e6)
166+
point.SetStartTimestamp(tsNanos)
167+
point.SetTimestamp(tsNanos)
168+
point.SetSum(mg.sum)
169+
point.SetCount(uint64(mg.count))
170+
populateLabelValuesPdata(orderedLabelKeys, mg.ls, point.LabelsMap())
171+
172+
return true
173+
}
174+
142175
func populateLabelValuesPdata(orderedKeys []string, ls labels.Labels, dest pdata.StringMap) {
143176
src := ls.Map()
144177
for _, key := range orderedKeys {

receiver/prometheusreceiver/internal/otlp_metricfamily_test.go

Lines changed: 200 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ var mc = byLookupMetadataCache{
6868
Metric: "s",
6969
Type: textparse.MetricTypeSummary,
7070
Help: "This is some help",
71-
Unit: "?",
71+
Unit: "ms",
7272
},
7373
"unknown": scrape.MetricMetadata{
7474
Metric: "u",
@@ -246,3 +246,202 @@ func TestMetricGroupData_toDistributionPointEquivalence(t *testing.T) {
246246
})
247247
}
248248
}
249+
250+
// TestMetricGroupData_toSummaryUnitTest feeds per-quantile scrapes into a
// metricFamilyPdata and checks that toSummaryPoint produces exactly one
// summary data point with the expected count, sum, quantile values,
// timestamps, and labels.
func TestMetricGroupData_toSummaryUnitTest(t *testing.T) {
	// scrape is one sample: timestamp (ms), value, and metric name suffix.
	type scrape struct {
		at     int64
		value  float64
		metric string
	}

	// labelsScrapes groups a label set with the scrapes recorded under it.
	type labelsScrapes struct {
		labels  labels.Labels
		scrapes []*scrape
	}
	tests := []struct {
		name          string
		labelsScrapes []*labelsScrapes
		want          func() pdata.SummaryDataPoint
	}{
		{
			name: "summary",
			// One entry per quantile; each carries count/sum samples plus the
			// quantile's observed value. All share labels {a=A, b=B}, so they
			// collapse into a single group.
			labelsScrapes: []*labelsScrapes{
				{
					labels: labels.Labels{
						{Name: "a", Value: "A"}, {Name: "quantile", Value: "0.0"}, {Name: "b", Value: "B"},
					},
					scrapes: []*scrape{
						{at: 10, value: 10, metric: "histogram_count"},
						{at: 10, value: 12, metric: "histogram_sum"},
						{at: 10, value: 8, metric: "value"},
					},
				},
				{
					labels: labels.Labels{
						{Name: "a", Value: "A"}, {Name: "quantile", Value: "0.75"}, {Name: "b", Value: "B"},
					},
					scrapes: []*scrape{
						{at: 11, value: 10, metric: "histogram_count"},
						{at: 11, value: 1004.78, metric: "histogram_sum"},
						{at: 11, value: 33.7, metric: "value"},
					},
				},
				{
					labels: labels.Labels{
						{Name: "a", Value: "A"}, {Name: "quantile", Value: "0.50"}, {Name: "b", Value: "B"},
					},
					scrapes: []*scrape{
						{at: 12, value: 10, metric: "histogram_count"},
						{at: 12, value: 13, metric: "histogram_sum"},
						{at: 12, value: 27, metric: "value"},
					},
				},
				{
					labels: labels.Labels{
						{Name: "a", Value: "A"}, {Name: "quantile", Value: "0.90"}, {Name: "b", Value: "B"},
					},
					scrapes: []*scrape{
						{at: 13, value: 10, metric: "histogram_count"},
						{at: 13, value: 14, metric: "histogram_sum"},
						{at: 13, value: 56, metric: "value"},
					},
				},
				{
					labels: labels.Labels{
						{Name: "a", Value: "A"}, {Name: "quantile", Value: "0.99"}, {Name: "b", Value: "B"},
					},
					scrapes: []*scrape{
						{at: 14, value: 10, metric: "histogram_count"},
						{at: 14, value: 15, metric: "histogram_sum"},
						{at: 14, value: 82, metric: "value"},
					},
				},
			},
			// Expected point: the last-seen count/sum (at=14), quantiles in
			// sorted order scaled to percentiles, and both timestamps set from
			// the latest scrape time (14 ms -> ns).
			want: func() pdata.SummaryDataPoint {
				point := pdata.NewSummaryDataPoint()
				point.SetCount(10)
				point.SetSum(15)
				qtL := point.QuantileValues()
				qn0 := qtL.AppendEmpty()
				qn0.SetQuantile(0)
				qn0.SetValue(8)
				qn50 := qtL.AppendEmpty()
				qn50.SetQuantile(50)
				qn50.SetValue(27)
				qn75 := qtL.AppendEmpty()
				qn75.SetQuantile(75)
				qn75.SetValue(33.7)
				qn90 := qtL.AppendEmpty()
				qn90.SetQuantile(90)
				qn90.SetValue(56)
				qn99 := qtL.AppendEmpty()
				qn99.SetQuantile(99)
				qn99.SetValue(82)
				point.SetTimestamp(14 * 1e6) // the time in milliseconds -> nanoseconds.
				point.SetStartTimestamp(14 * 1e6)
				labelsMap := point.LabelsMap()
				labelsMap.Insert("a", "A")
				labelsMap.Insert("b", "B")
				return point
			},
		},
	}

	for _, tt := range tests {
		tt := tt
		t.Run(tt.name, func(t *testing.T) {
			mp := newMetricFamilyPdata(tt.name, mc).(*metricFamilyPdata)
			for _, lbs := range tt.labelsScrapes {
				for _, scrape := range lbs.scrapes {
					require.NoError(t, mp.Add(scrape.metric, lbs.labels.Copy(), scrape.at, scrape.value))
				}
			}

			// All scrapes share {a=A, b=B}, so there must be exactly one group.
			require.Equal(t, 1, len(mp.groups), "Expecting exactly 1 groupKey")
			// Get the lone group key.
			groupKey := ""
			for key := range mp.groups {
				groupKey = key
			}
			require.NotNil(t, mp.groups[groupKey], "Expecting the groupKey to have a value given key:: "+groupKey)

			sdpL := pdata.NewSummaryDataPointSlice()
			require.True(t, mp.groups[groupKey].toSummaryPoint(mp.labelKeysOrdered, &sdpL))
			require.Equal(t, 1, sdpL.Len(), "Exactly one point expected")
			got := sdpL.At(0)
			want := tt.want()
			require.Equal(t, want, got, "Expected the points to be equal")
		})
	}
}
377+
378+
func TestMetricGroupData_toSummaryPointEquivalence(t *testing.T) {
379+
type scrape struct {
380+
at int64
381+
value float64
382+
metric string
383+
}
384+
tests := []struct {
385+
name string
386+
labels labels.Labels
387+
scrapes []*scrape
388+
}{
389+
{
390+
name: "summary",
391+
labels: labels.Labels{{Name: "a", Value: "A"}, {Name: "quantile", Value: "0.75"}, {Name: "b", Value: "B"}},
392+
scrapes: []*scrape{
393+
{at: 11, value: 10, metric: "summary_count"},
394+
{at: 11, value: 1004.78, metric: "summary_sum"},
395+
{at: 13, value: 33.7, metric: "value"},
396+
},
397+
},
398+
}
399+
400+
for _, tt := range tests {
401+
tt := tt
402+
t.Run(tt.name, func(t *testing.T) {
403+
mf := newMetricFamily(tt.name, mc, zap.NewNop()).(*metricFamily)
404+
mp := newMetricFamilyPdata(tt.name, mc).(*metricFamilyPdata)
405+
for _, tv := range tt.scrapes {
406+
require.NoError(t, mp.Add(tv.metric, tt.labels.Copy(), tv.at, tv.value))
407+
require.NoError(t, mf.Add(tv.metric, tt.labels.Copy(), tv.at, tv.value))
408+
}
409+
groupKey := mf.getGroupKey(tt.labels.Copy())
410+
ocTimeseries := mf.groups[groupKey].toSummaryTimeSeries(mf.labelKeysOrdered)
411+
sdpL := pdata.NewSummaryDataPointSlice()
412+
require.True(t, mp.groups[groupKey].toSummaryPoint(mp.labelKeysOrdered, &sdpL))
413+
require.Equal(t, len(ocTimeseries.Points), sdpL.Len(), "They should have the exact same number of points")
414+
require.Equal(t, 1, sdpL.Len(), "Exactly one point expected")
415+
ocPoint := ocTimeseries.Points[0]
416+
pdataPoint := sdpL.At(0)
417+
// 1. Ensure that the startTimestamps are equal.
418+
require.Equal(t, ocTimeseries.GetStartTimestamp().AsTime(), pdataPoint.Timestamp().AsTime(), "The timestamp must be equal")
419+
// 2. Ensure that the count is equal.
420+
ocSummary := ocPoint.GetSummaryValue()
421+
if false {
422+
t.Logf("\nOcSummary: %#v\nPdSummary: %#v\n\nocPoint: %#v\n", ocSummary, pdataPoint, ocPoint.GetSummaryValue())
423+
return
424+
}
425+
require.Equal(t, ocSummary.GetCount().GetValue(), int64(pdataPoint.Count()), "Count must be equal")
426+
// 3. Ensure that the sum is equal.
427+
require.Equal(t, ocSummary.GetSum().GetValue(), pdataPoint.Sum(), "Sum must be equal")
428+
// 4. Ensure that the point's timestamp is equal to that from the OpenCensusProto data point.
429+
require.Equal(t, ocPoint.GetTimestamp().AsTime(), pdataPoint.Timestamp().AsTime(), "Point timestamps must be equal")
430+
// 5. Ensure that the labels all match up.
431+
ocStringMap := pdata.NewStringMap()
432+
for i, labelValue := range ocTimeseries.LabelValues {
433+
ocStringMap.Insert(mf.labelKeysOrdered[i], labelValue.Value)
434+
}
435+
require.Equal(t, ocStringMap.Sort(), pdataPoint.LabelsMap().Sort())
436+
// 6. Ensure that the quantile values all match up.
437+
ocQuantiles := ocSummary.GetSnapshot().GetPercentileValues()
438+
pdataQuantiles := pdataPoint.QuantileValues()
439+
require.Equal(t, len(ocQuantiles), pdataQuantiles.Len())
440+
for i, ocQuantile := range ocQuantiles {
441+
pdataQuantile := pdataQuantiles.At(i)
442+
require.Equal(t, ocQuantile.Percentile, pdataQuantile.Quantile(), "The quantile percentiles must match")
443+
require.Equal(t, ocQuantile.Value, pdataQuantile.Value(), "The quantile values must match")
444+
}
445+
})
446+
}
447+
}

0 commit comments

Comments
 (0)