
Commit 8026284

songy23 authored and jriguera committed
[chore][exporter/datadog] skip known data race (open-telemetry#35128)
**Description:** Skip `TestIntegrationInternalMetrics` under the race detector.
**Link to tracking issue:** open-telemetry#34836
1 parent 733e729 commit 8026284
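
For context on the fix: the Go toolchain defines the `race` build tag whenever the race detector is enabled, so a file guarded with `//go:build !race` is excluded from `go test -race` builds at compile time. Below is a minimal sketch of that pattern; the file name and test name are hypothetical, and only the build constraint and the `integrationtest` package name come from this commit.

```go
//go:build !race

// Hypothetical example_norace_test.go: this file is compiled only when
// the race detector is off, because `go test -race` sets the `race` tag.
package integrationtest

import "testing"

// TestKnownDataRace is a stand-in for any test that exercises a known,
// tracked data race and should not run under the race detector.
func TestKnownDataRace(t *testing.T) {
	t.Log("runs under `go test`; excluded at compile time under `go test -race`")
}
```

A plain `go test ./...` still compiles and runs such a file, so the test is only skipped on race-enabled CI jobs.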

File tree

2 files changed: +98 -82 lines changed

exporter/datadogexporter/integrationtest/integration_test.go

Lines changed: 0 additions & 82 deletions

```diff
@@ -11,7 +11,6 @@ import (
 	"fmt"
 	"io"
 	"net/http"
-	"runtime"
 	"strings"
 	"sync"
 	"testing"
@@ -528,84 +527,3 @@ func sendLogs(t *testing.T, numLogs int) {
 	lr := make([]log.Record, numLogs)
 	assert.NoError(t, logExporter.Export(ctx, lr))
 }
-
-func TestIntegrationInternalMetrics(t *testing.T) {
-	if runtime.GOOS == "windows" {
-		t.Skip("flaky test on windows https://github.com/open-telemetry/opentelemetry-collector-contrib/issues/34836")
-	}
-
-	// 1. Set up mock Datadog server
-	seriesRec := &testutil.HTTPRequestRecorderWithChan{Pattern: testutil.MetricV2Endpoint, ReqChan: make(chan []byte, 100)}
-	tracesRec := &testutil.HTTPRequestRecorderWithChan{Pattern: testutil.TraceEndpoint, ReqChan: make(chan []byte, 100)}
-	server := testutil.DatadogServerMock(seriesRec.HandlerFunc, tracesRec.HandlerFunc)
-	defer server.Close()
-	t.Setenv("SERVER_URL", server.URL)
-
-	// 2. Start in-process collector
-	factories := getIntegrationTestComponents(t)
-	app := getIntegrationTestCollector(t, "integration_test_internal_metrics_config.yaml", factories)
-	go func() {
-		assert.NoError(t, app.Run(context.Background()))
-	}()
-	defer app.Shutdown()
-
-	waitForReadiness(app)
-
-	// 3. Generate and send traces
-	sendTraces(t)
-
-	// 4. Validate Datadog trace agent & OTel internal metrics are sent to the mock server
-	expectedMetrics := map[string]struct{}{
-		// Datadog internal metrics on trace and stats writers
-		"otelcol_datadog_otlp_translator_resources_missing_source": {},
-		"otelcol_datadog_trace_agent_stats_writer_bytes": {},
-		"otelcol_datadog_trace_agent_stats_writer_retries": {},
-		"otelcol_datadog_trace_agent_stats_writer_stats_buckets": {},
-		"otelcol_datadog_trace_agent_stats_writer_stats_entries": {},
-		"otelcol_datadog_trace_agent_stats_writer_payloads": {},
-		"otelcol_datadog_trace_agent_stats_writer_client_payloads": {},
-		"otelcol_datadog_trace_agent_stats_writer_errors": {},
-		"otelcol_datadog_trace_agent_stats_writer_splits": {},
-		"otelcol_datadog_trace_agent_trace_writer_bytes": {},
-		"otelcol_datadog_trace_agent_trace_writer_retries": {},
-		"otelcol_datadog_trace_agent_trace_writer_spans": {},
-		"otelcol_datadog_trace_agent_trace_writer_traces": {},
-		"otelcol_datadog_trace_agent_trace_writer_payloads": {},
-		"otelcol_datadog_trace_agent_trace_writer_errors": {},
-		"otelcol_datadog_trace_agent_trace_writer_events": {},
-
-		// OTel collector internal metrics
-		"otelcol_process_memory_rss": {},
-		"otelcol_process_runtime_total_sys_memory_bytes": {},
-		"otelcol_process_uptime": {},
-		"otelcol_process_cpu_seconds": {},
-		"otelcol_process_runtime_heap_alloc_bytes": {},
-		"otelcol_process_runtime_total_alloc_bytes": {},
-		"otelcol_receiver_accepted_metric_points": {},
-		"otelcol_receiver_accepted_spans": {},
-		"otelcol_exporter_queue_capacity": {},
-		"otelcol_exporter_queue_size": {},
-		"otelcol_exporter_sent_spans": {},
-		"otelcol_exporter_sent_metric_points": {},
-	}
-
-	metricMap := make(map[string]series)
-	for len(metricMap) < len(expectedMetrics) {
-		select {
-		case <-tracesRec.ReqChan:
-			// Drain the channel, no need to look into the traces
-		case metricsBytes := <-seriesRec.ReqChan:
-			var metrics seriesSlice
-			gz := getGzipReader(t, metricsBytes)
-			dec := json.NewDecoder(gz)
-			assert.NoError(t, dec.Decode(&metrics))
-			for _, s := range metrics.Series {
-				if _, ok := expectedMetrics[s.Metric]; ok {
-					metricMap[s.Metric] = s
-				}
-			}
-		case <-time.After(60 * time.Second):
-			t.Fail()
-		}
-	}
-}
```
Lines changed: 98 additions & 0 deletions

```diff
@@ -0,0 +1,98 @@
+// Copyright The OpenTelemetry Authors
+// SPDX-License-Identifier: Apache-2.0
+
+//go:build !race
+
+package integrationtest // import "github.com/open-telemetry/opentelemetry-collector-contrib/exporter/datadogexporter/integrationtest"
+
+import (
+	"context"
+	"encoding/json"
+	"runtime"
+	"testing"
+	"time"
+
+	"github.com/DataDog/datadog-agent/comp/otelcol/otlp/testutil"
+	"github.com/stretchr/testify/assert"
+)
+
+func TestIntegrationInternalMetrics(t *testing.T) {
+	if runtime.GOOS == "windows" {
+		t.Skip("flaky test on windows https://github.com/open-telemetry/opentelemetry-collector-contrib/issues/34836")
+	}
+
+	// 1. Set up mock Datadog server
+	seriesRec := &testutil.HTTPRequestRecorderWithChan{Pattern: testutil.MetricV2Endpoint, ReqChan: make(chan []byte, 100)}
+	tracesRec := &testutil.HTTPRequestRecorderWithChan{Pattern: testutil.TraceEndpoint, ReqChan: make(chan []byte, 100)}
+	server := testutil.DatadogServerMock(seriesRec.HandlerFunc, tracesRec.HandlerFunc)
+	defer server.Close()
+	t.Setenv("SERVER_URL", server.URL)
+
+	// 2. Start in-process collector
+	factories := getIntegrationTestComponents(t)
+	app := getIntegrationTestCollector(t, "integration_test_internal_metrics_config.yaml", factories)
+	go func() {
+		assert.NoError(t, app.Run(context.Background()))
+	}()
+	defer app.Shutdown()
+
+	waitForReadiness(app)
+
+	// 3. Generate and send traces
+	sendTraces(t)
+
+	// 4. Validate Datadog trace agent & OTel internal metrics are sent to the mock server
+	expectedMetrics := map[string]struct{}{
+		// Datadog internal metrics on trace and stats writers
+		"otelcol_datadog_otlp_translator_resources_missing_source": {},
+		"otelcol_datadog_trace_agent_stats_writer_bytes": {},
+		"otelcol_datadog_trace_agent_stats_writer_retries": {},
+		"otelcol_datadog_trace_agent_stats_writer_stats_buckets": {},
+		"otelcol_datadog_trace_agent_stats_writer_stats_entries": {},
+		"otelcol_datadog_trace_agent_stats_writer_payloads": {},
+		"otelcol_datadog_trace_agent_stats_writer_client_payloads": {},
+		"otelcol_datadog_trace_agent_stats_writer_errors": {},
+		"otelcol_datadog_trace_agent_stats_writer_splits": {},
+		"otelcol_datadog_trace_agent_trace_writer_bytes": {},
+		"otelcol_datadog_trace_agent_trace_writer_retries": {},
+		"otelcol_datadog_trace_agent_trace_writer_spans": {},
+		"otelcol_datadog_trace_agent_trace_writer_traces": {},
+		"otelcol_datadog_trace_agent_trace_writer_payloads": {},
+		"otelcol_datadog_trace_agent_trace_writer_errors": {},
+		"otelcol_datadog_trace_agent_trace_writer_events": {},
+
+		// OTel collector internal metrics
+		"otelcol_process_memory_rss": {},
+		"otelcol_process_runtime_total_sys_memory_bytes": {},
+		"otelcol_process_uptime": {},
+		"otelcol_process_cpu_seconds": {},
+		"otelcol_process_runtime_heap_alloc_bytes": {},
+		"otelcol_process_runtime_total_alloc_bytes": {},
+		"otelcol_receiver_accepted_metric_points": {},
+		"otelcol_receiver_accepted_spans": {},
+		"otelcol_exporter_queue_capacity": {},
+		"otelcol_exporter_queue_size": {},
+		"otelcol_exporter_sent_spans": {},
+		"otelcol_exporter_sent_metric_points": {},
+	}
+
+	metricMap := make(map[string]series)
+	for len(metricMap) < len(expectedMetrics) {
+		select {
+		case <-tracesRec.ReqChan:
+			// Drain the channel, no need to look into the traces
+		case metricsBytes := <-seriesRec.ReqChan:
+			var metrics seriesSlice
+			gz := getGzipReader(t, metricsBytes)
+			dec := json.NewDecoder(gz)
+			assert.NoError(t, dec.Decode(&metrics))
+			for _, s := range metrics.Series {
+				if _, ok := expectedMetrics[s.Metric]; ok {
+					metricMap[s.Metric] = s
+				}
+			}
+		case <-time.After(60 * time.Second):
+			t.Fail()
+		}
+	}
+}
```
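
As the diff shows, the commit relocates the test wholesale into a `//go:build !race` file rather than calling `t.Skip` at runtime: under the race detector the file is never compiled at all, while the existing Windows-specific runtime skip is preserved inside the relocated test.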
