Skip to content
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
Show all changes
32 commits
Select commit Hold shift + click to select a range
131a3b8
test cases
perebaj Mar 19, 2025
1ca4ec8
improve testcase
perebaj Mar 19, 2025
b9dfc9f
mock consumer metrics
perebaj Mar 21, 2025
288c6bd
clean interRequestCache for each run
perebaj Mar 21, 2025
f2c0671
add target info cache
perebaj Mar 21, 2025
bd80734
Update receiver/prometheusremotewritereceiver/receiver.go
perebaj Mar 21, 2025
07fe080
Update receiver/prometheusremotewritereceiver/receiver.go
perebaj Mar 21, 2025
82d52a7
Update receiver/prometheusremotewritereceiver/receiver.go
perebaj Mar 21, 2025
2ddf810
Update receiver/prometheusremotewritereceiver/receiver.go
perebaj Mar 21, 2025
9f19aed
metric name target_info
perebaj Apr 21, 2025
499f0de
Merge branch 'target-info' of github.com:perebaj/opentelemetry-collec…
perebaj Apr 21, 2025
ddce741
remove t.Run
perebaj Apr 21, 2025
14f989d
fix conflict on go.modules and go.sum
perebaj Apr 21, 2025
5a08906
validate expected metric in the end of request
perebaj Apr 21, 2025
2451c27
add receiverhelper.ObsReport
perebaj Apr 21, 2025
f262c7c
Merge branch 'main' into target-info
perebaj Apr 21, 2025
42390ff
Update .chloggen/target-info.yaml
perebaj Apr 22, 2025
3026ee4
Update .chloggen/target-info.yaml
perebaj Apr 22, 2025
7739e72
remove obsreport
perebaj Apr 22, 2025
5afb0bc
Merge branch 'target-info' of github.com:perebaj/opentelemetry-collec…
perebaj Apr 22, 2025
05a8845
remove useless validation ls.Has(labels.MetricName)
perebaj Apr 22, 2025
d2ae2f4
remove useless ReplaceAll _ .
perebaj Apr 22, 2025
bc70d0c
remove old comments
perebaj Apr 22, 2025
3ce627e
replace METRIC_TYPE_INFO by GAUGE
perebaj Apr 22, 2025
14fbe4c
assert stats
perebaj Apr 22, 2025
c4e8771
tests sending target info in different orders
perebaj Apr 22, 2025
2922d19
Merge branch 'main' into target-info
perebaj Apr 22, 2025
da08b4c
conflict go.sum/mod
perebaj Apr 22, 2025
130c2b8
remove replace library & add unit test for taget info multiple requests
perebaj Apr 23, 2025
d13f346
improve comments
perebaj Apr 23, 2025
eceb950
Merge branch 'main' into target-info
perebaj Apr 23, 2025
892426a
rename cache variable
perebaj Apr 23, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
27 changes: 27 additions & 0 deletions .chloggen/target-info.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
# Use this changelog template to create an entry for release notes.

# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
change_type: 'enhancement'

# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver)
component: prometheusremotewritereceiver

# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
note: Create a cache approach to deal with desynchronization between requests that will be processed.

# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists.
issues: [37277]

# (Optional) One or more lines of additional information to render under the primary note.
# These lines will be padded with 2 spaces and then inserted directly into the document.
# Use pipe (|) for multiline entries.
subtext: The target info can be sent through this service in a desynchronized way, so we need to create a cache to deal with this edge case.

# If your change doesn't affect end users or the exported elements of any package,
# you should instead start your pull request title with [chore] or use the "Skip Changelog" label.
# Optional: The change log or logs in which this entry should be included.
# e.g. '[user]' or '[user, api]'
# Include 'user' if the change is relevant to end users.
# Include 'api' if there is a change to a library API.
# Default: '[user]'
change_logs: [user]
52 changes: 42 additions & 10 deletions receiver/prometheusremotewritereceiver/receiver.go
Original file line number Diff line number Diff line change
Expand Up @@ -37,16 +37,18 @@ func newRemoteWriteReceiver(settings receiver.Settings, cfg *Config, nextConsume
server: &http.Server{
ReadTimeout: 60 * time.Second,
},
interRequestCache: make(map[uint64]pmetric.ResourceMetrics),
}, nil
}

type prometheusRemoteWriteReceiver struct {
settings receiver.Settings
nextConsumer consumer.Metrics

config *Config
server *http.Server
wg sync.WaitGroup
config *Config
server *http.Server
wg sync.WaitGroup
interRequestCache map[uint64]pmetric.ResourceMetrics
}

func (prw *prometheusRemoteWriteReceiver) Start(ctx context.Context, host component.Host) error {
Expand Down Expand Up @@ -123,14 +125,15 @@ func (prw *prometheusRemoteWriteReceiver) handlePRW(w http.ResponseWriter, req *
return
}

_, stats, err := prw.translateV2(req.Context(), &prw2Req)
m, stats, err := prw.translateV2(req.Context(), &prw2Req)
stats.SetHeaders(w)
if err != nil {
http.Error(w, err.Error(), http.StatusBadRequest) // Following instructions at https://prometheus.io/docs/specs/remote_write_spec_2_0/#invalid-samples
return
}

w.WriteHeader(http.StatusNoContent)
_ = prw.nextConsumer.ConsumeMetrics(req.Context(), m)
}

// parseProto parses the content-type header and returns the version of the remote-write protocol.
Expand Down Expand Up @@ -177,32 +180,61 @@ func (prw *prometheusRemoteWriteReceiver) translateV2(_ context.Context, req *wr
// Instead of creating a whole new OTLP metric, we just append the new sample to the existing OTLP metric.
// This cache is called "intra" because in the future we'll have a "interRequestCache" to cache resourceAttributes
// between requests based on the metric "target_info".
intraRequestCache = make(map[uint64]pmetric.ResourceMetrics)
// intraRequestCache = make(map[uint64]pmetric.ResourceMetrics)
// The key is composed by: resource_hash:scope_name:scope_version:metric_name:unit:type
// TODO: use the appropriate hash function.
metricCache = make(map[string]pmetric.Metric)
)

for _, ts := range req.Timeseries {
ls := ts.ToLabels(&labelsBuilder, req.Symbols)
if !ls.Has(labels.MetricName) {
if !ls.Has(labels.MetricName) && ts.Metadata.Type != writev2.Metadata_METRIC_TYPE_INFO {
badRequestErrors = errors.Join(badRequestErrors, fmt.Errorf("missing metric name in labels"))
continue
} else if duplicateLabel, hasDuplicate := ls.HasDuplicateLabelNames(); hasDuplicate {
badRequestErrors = errors.Join(badRequestErrors, fmt.Errorf("duplicate label %q in labels", duplicateLabel))
continue
}

// If it is a metric of type INFO, we use its labels as attributes of the resource
// Ref: https://opentelemetry.io/docs/specs/otel/compatibility/prometheus_and_openmetrics/#resource-attributes-1
if ts.Metadata.Type == writev2.Metadata_METRIC_TYPE_INFO {
var rm pmetric.ResourceMetrics
hashedLabels := xxhash.Sum64String(ls.Get("job") + string([]byte{'\xff'}) + ls.Get("instance"))

// search or create the ResourceMetrics
if existingRM, ok := prw.interRequestCache[hashedLabels]; ok {
rm = existingRM
} else {
rm = otelMetrics.ResourceMetrics().AppendEmpty()
}

// Add all labels as resource attributes
attrs := rm.Resource().Attributes()
parseJobAndInstance(attrs, ls.Get("job"), ls.Get("instance"))

// Add the remaining labels as resource attributes
for _, l := range ls {
if l.Name != "job" && l.Name != "instance" && l.Name != labels.MetricName {
// Convert the label name to the resource attribute format
attrKey := strings.ReplaceAll(l.Name, "_", ".")
attrs.PutStr(attrKey, l.Value)
}
}
prw.interRequestCache[hashedLabels] = rm
continue
}

// For the rest of the metrics, continue with the normal processing
var rm pmetric.ResourceMetrics
hashedLabels := xxhash.Sum64String(ls.Get("job") + string([]byte{'\xff'}) + ls.Get("instance"))
intraCacheEntry, ok := intraRequestCache[hashedLabels]
existingRM, ok := prw.interRequestCache[hashedLabels]
if ok {
// We found the same time series in the same request, so we should append to the same OTLP metric.
rm = intraCacheEntry
rm = existingRM
} else {
rm = otelMetrics.ResourceMetrics().AppendEmpty()
parseJobAndInstance(rm.Resource().Attributes(), ls.Get("job"), ls.Get("instance"))
intraRequestCache[hashedLabels] = rm
prw.interRequestCache[hashedLabels] = rm
}

scopeName, scopeVersion := prw.extractScopeInfo(ls)
Expand Down
183 changes: 181 additions & 2 deletions receiver/prometheusremotewritereceiver/receiver_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import (
"fmt"
"net/http"
"net/http/httptest"
"sync"
"testing"
"time"

Expand All @@ -18,6 +19,7 @@ import (
writev2 "github.com/prometheus/prometheus/prompb/io/prometheus/write/v2"
"github.com/prometheus/prometheus/storage/remote"
"github.com/stretchr/testify/assert"
"go.opentelemetry.io/collector/consumer"
"go.opentelemetry.io/collector/consumer/consumertest"
"go.opentelemetry.io/collector/pdata/pcommon"
"go.opentelemetry.io/collector/pdata/pmetric"
Expand Down Expand Up @@ -406,17 +408,194 @@ func TestTranslateV2(t *testing.T) {
return expected
}(),
},
{
name: "service with target_info metric",
request: &writev2.Request{
Symbols: []string{
"",
"job", "production/service_a", // 1, 2
"instance", "host1", // 3, 4
"machine_type", "n1-standard-1", // 5, 6
"cloud_provider", "gcp", // 7, 8
"region", "us-central1", // 9, 10
"datacenter", "sdc", // 11, 12
"__name__", "normal_metric", // 13, 14
"d", "e", // 15, 16
},
Timeseries: []writev2.TimeSeries{
// Generating 2 metrics, one is a type info and the other is a normal gauge.
// The type info metric should be translated to just contains the resource attributes.
// The normal_metric should be translated as usual.
{
Metadata: writev2.Metadata{Type: writev2.Metadata_METRIC_TYPE_INFO},
LabelsRefs: []uint32{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12},
},
{
Metadata: writev2.Metadata{Type: writev2.Metadata_METRIC_TYPE_GAUGE},
LabelsRefs: []uint32{13, 14, 1, 2, 3, 4, 15, 16},
Samples: []writev2.Sample{{Value: 1, Timestamp: 1}},
},
},
},
expectedMetrics: func() pmetric.Metrics {
metrics := pmetric.NewMetrics()

rm := metrics.ResourceMetrics().AppendEmpty()
attrs := rm.Resource().Attributes()
attrs.PutStr("service.namespace", "production")
attrs.PutStr("service.name", "service_a")
attrs.PutStr("service.instance.id", "host1")
attrs.PutStr("machine.type", "n1-standard-1")
attrs.PutStr("cloud.provider", "gcp")
attrs.PutStr("region", "us-central1")
attrs.PutStr("datacenter", "sdc")

sm := rm.ScopeMetrics().AppendEmpty()
sm.Scope().SetName("OpenTelemetry Collector")
sm.Scope().SetVersion("latest")

m := sm.Metrics().AppendEmpty()
m.SetName("normal_metric")
m.SetUnit("")
m.SetDescription("")

dp := m.SetEmptyGauge().DataPoints().AppendEmpty()
dp.SetDoubleValue(1.0)
dp.SetTimestamp(pcommon.Timestamp(1 * int64(time.Millisecond)))
dp.Attributes().PutStr("d", "e")

return metrics
}(),
},
} {
t.Run(tc.name, func(t *testing.T) {
metrics, stats, err := prwReceiver.translateV2(ctx, tc.request)
// since we are using the interRequestCache to store values across requests, we need to clear it after each test, otherwise it will affect the next test
prwReceiver.interRequestCache = make(map[uint64]pmetric.ResourceMetrics)
metrics, _, err := prwReceiver.translateV2(ctx, tc.request)
if tc.expectError != "" {
assert.ErrorContains(t, err, tc.expectError)
return
}

assert.NoError(t, err)
assert.NoError(t, pmetrictest.CompareMetrics(tc.expectedMetrics, metrics))
assert.Equal(t, tc.expectedStats, stats)
})
}
}

// nonMutatingConsumer is embedded in test consumers to declare that they do
// not modify the metrics handed to them.
type nonMutatingConsumer struct{}

// Capabilities returns the base consumer capabilities.
func (bc nonMutatingConsumer) Capabilities() consumer.Capabilities {
	return consumer.Capabilities{MutatesData: false}
}

// MockConsumer is a test double for consumer.Metrics that records every
// batch it receives so tests can assert on the forwarded data afterwards.
type MockConsumer struct {
	nonMutatingConsumer
	mu         sync.Mutex        // guards metrics and dataPoints
	metrics    []pmetric.Metrics // batches received via ConsumeMetrics, in arrival order
	dataPoints int               // running total of data points across all received batches
}

// ConsumeMetrics records the received batch and updates the running
// data-point total. It is safe for concurrent use and never fails.
func (m *MockConsumer) ConsumeMetrics(_ context.Context, md pmetric.Metrics) error {
	// Count outside the critical section to keep the lock hold time minimal.
	points := md.DataPointCount()

	m.mu.Lock()
	m.metrics = append(m.metrics, md)
	m.dataPoints += points
	m.mu.Unlock()

	return nil
}

// TestTargetInfoWithMultipleRequests verifies that resource attributes carried
// by a target_info metric in one remote-write request are applied to a normal
// metric arriving in a later, separate request, via the receiver's
// inter-request cache keyed by job/instance.
func TestTargetInfoWithMultipleRequests(t *testing.T) {
	prwReceiver := setupMetricsReceiver(t)
	mockConsumer := new(MockConsumer)
	prwReceiver.nextConsumer = mockConsumer
	w := httptest.NewRecorder()

	// First request carries only the target_info metric: its labels should be
	// cached as resource attributes and produce no data points of their own.
	firstRequest := &writev2.Request{
		Symbols: []string{
			"",
			"job", "production/service_a", // 1, 2
			"instance", "host1", // 3, 4
			"machine_type", "n1-standard-1", // 5, 6
			"cloud_provider", "gcp", // 7, 8
			"region", "us-central1", // 9, 10
		},
		Timeseries: []writev2.TimeSeries{
			{
				Metadata:   writev2.Metadata{Type: writev2.Metadata_METRIC_TYPE_INFO},
				LabelsRefs: []uint32{1, 2, 3, 4, 5, 6, 7, 8, 9, 10},
			},
		},
	}

	buf := proto.NewBuffer(nil)
	err := buf.Marshal(firstRequest)
	assert.NoError(t, err)

	req1 := httptest.NewRequest(http.MethodPost, "/api/v1/write", bytes.NewReader(buf.Bytes()))
	req1.Header.Set("Content-Type", fmt.Sprintf("application/x-protobuf;proto=%s", promconfig.RemoteWriteProtoMsgV2))
	req1.Header.Set("Content-Encoding", "snappy")

	prwReceiver.handlePRW(w, req1)
	resp1 := w.Result()
	defer resp1.Body.Close()
	assert.Equal(t, http.StatusNoContent, resp1.StatusCode)

	// Second request carries a normal gauge for the same job/instance pair, so
	// it should be attached to the resource cached by the first request.
	secondRequest := &writev2.Request{
		Symbols: []string{
			"",
			"job", "production/service_a", // 1, 2
			"instance", "host1", // 3, 4
			"__name__", "normal_metric", // 5, 6
			"foo", "bar", // 7, 8
		},
		Timeseries: []writev2.TimeSeries{
			{
				Metadata:   writev2.Metadata{Type: writev2.Metadata_METRIC_TYPE_GAUGE},
				LabelsRefs: []uint32{5, 6, 1, 2, 3, 4, 7, 8},
				Samples:    []writev2.Sample{{Value: 2, Timestamp: 2}},
			},
		},
	}

	buf2 := proto.NewBuffer(nil)
	err = buf2.Marshal(secondRequest)
	assert.NoError(t, err)

	req2 := httptest.NewRequest(http.MethodPost, "/api/v1/write", bytes.NewReader(buf2.Bytes()))
	req2.Header.Set("Content-Type", fmt.Sprintf("application/x-protobuf;proto=%s", promconfig.RemoteWriteProtoMsgV2))
	req2.Header.Set("Content-Encoding", "snappy")
	w = httptest.NewRecorder()
	prwReceiver.handlePRW(w, req2)
	resp2 := w.Result()
	defer resp2.Body.Close()
	assert.Equal(t, http.StatusNoContent, resp2.StatusCode)

	expectedMetrics := func() pmetric.Metrics {
		metrics := pmetric.NewMetrics()

		rm := metrics.ResourceMetrics().AppendEmpty()
		attrs := rm.Resource().Attributes()
		attrs.PutStr("service.namespace", "production")
		attrs.PutStr("service.name", "service_a")
		attrs.PutStr("service.instance.id", "host1")
		attrs.PutStr("machine.type", "n1-standard-1")
		attrs.PutStr("cloud.provider", "gcp")
		attrs.PutStr("region", "us-central1")

		sm := rm.ScopeMetrics().AppendEmpty()
		sm.Scope().SetName("OpenTelemetry Collector")
		sm.Scope().SetVersion("latest")

		m1 := sm.Metrics().AppendEmpty()
		m1.SetName("normal_metric")
		m1.SetUnit("")
		m1.SetDescription("")
		dp1 := m1.SetEmptyGauge().DataPoints().AppendEmpty()
		dp1.SetDoubleValue(2.0)
		dp1.SetTimestamp(pcommon.Timestamp(2 * int64(time.Millisecond)))
		dp1.Attributes().PutStr("foo", "bar")

		return metrics
	}()

	// Both requests were forwarded to the consumer. Guard the index below so a
	// regression fails the assertion instead of panicking with an
	// out-of-range error.
	if assert.Len(t, mockConsumer.metrics, 2) {
		// The second request appends its sample to the ResourceMetrics cached
		// from the first request, so the combined result is observed on the
		// FIRST consumed batch — NOTE(review): this relies on the cached
		// ResourceMetrics aliasing the first batch's backing storage.
		assert.NoError(t, pmetrictest.CompareMetrics(expectedMetrics, mockConsumer.metrics[0]))
	}
}
Loading