Skip to content

Commit e341bc2

Browse files
authored
Merge branch 'main' into main
2 parents 3b28f9d + dda6bf9 commit e341bc2

File tree

142 files changed

+662
-398
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

142 files changed

+662
-398
lines changed
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
# Use this changelog template to create an entry for release notes.
2+
3+
# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
4+
change_type: enhancement
5+
6+
# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver)
7+
component: azureauthextension
8+
9+
# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
10+
note: Add implementation of the extensionauth.HTTPClient and extensionauth.Server interface functions.
11+
12+
# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists.
13+
issues: [39178]
14+
15+
# (Optional) One or more lines of additional information to render under the primary note.
16+
# These lines will be padded with 2 spaces and then inserted directly into the document.
17+
# Use pipe (|) for multiline entries.
18+
subtext:
19+
20+
# If your change doesn't affect end users or the exported elements of any package,
21+
# you should instead start your pull request title with [chore] or use the "Skip Changelog" label.
22+
# Optional: The change log or logs in which this entry should be included.
23+
# e.g. '[user]' or '[user, api]'
24+
# Include 'user' if the change is relevant to end users.
25+
# Include 'api' if there is a change to a library API.
26+
# Default: '[user]'
27+
change_logs: []

connector/datadogconnector/connector_native_test.go

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -102,10 +102,7 @@ func TestContainerTagsNative(t *testing.T) {
102102
err = connector.ConsumeTraces(context.Background(), trace2)
103103
assert.NoError(t, err)
104104

105-
for {
106-
if len(metricsSink.AllMetrics()) > 0 {
107-
break
108-
}
105+
for len(metricsSink.AllMetrics()) == 0 {
109106
time.Sleep(100 * time.Millisecond)
110107
}
111108

connector/datadogconnector/connector_test.go

Lines changed: 3 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -169,10 +169,7 @@ func TestContainerTags(t *testing.T) {
169169
})
170170
assert.Equal(t, 3, count)
171171

172-
for {
173-
if len(metricsSink.AllMetrics()) > 0 {
174-
break
175-
}
172+
for len(metricsSink.AllMetrics()) == 0 {
176173
time.Sleep(100 * time.Millisecond)
177174
}
178175

@@ -228,10 +225,7 @@ func testReceiveResourceSpansV2(t *testing.T, enableReceiveResourceSpansV2 bool)
228225
err = connector.ConsumeTraces(context.Background(), trace)
229226
assert.NoError(t, err)
230227

231-
for {
232-
if len(metricsSink.AllMetrics()) > 0 {
233-
break
234-
}
228+
for len(metricsSink.AllMetrics()) == 0 {
235229
time.Sleep(100 * time.Millisecond)
236230
}
237231

@@ -287,10 +281,7 @@ func testOperationAndResourceNameV2(t *testing.T, enableOperationAndResourceName
287281
err = connector.ConsumeTraces(context.Background(), trace)
288282
assert.NoError(t, err)
289283

290-
for {
291-
if len(metricsSink.AllMetrics()) > 0 {
292-
break
293-
}
284+
for len(metricsSink.AllMetrics()) == 0 {
294285
time.Sleep(100 * time.Millisecond)
295286
}
296287

connector/failoverconnector/logs.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -109,6 +109,6 @@ func newLogsToLogs(set connector.Settings, cfg component.Config, logs consumer.L
109109
return &logsFailover{
110110
config: config,
111111
failover: failover,
112-
logger: set.TelemetrySettings.Logger,
112+
logger: set.Logger,
113113
}, nil
114114
}

connector/failoverconnector/metrics.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -109,6 +109,6 @@ func newMetricsToMetrics(set connector.Settings, cfg component.Config, metrics c
109109
return &metricsFailover{
110110
config: config,
111111
failover: failover,
112-
logger: set.TelemetrySettings.Logger,
112+
logger: set.Logger,
113113
}, nil
114114
}

connector/failoverconnector/traces.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -110,6 +110,6 @@ func newTracesToTraces(set connector.Settings, cfg component.Config, traces cons
110110
return &tracesFailover{
111111
config: config,
112112
failover: failover,
113-
logger: set.TelemetrySettings.Logger,
113+
logger: set.Logger,
114114
}, nil
115115
}

connector/otlpjsonconnector/logs.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -24,12 +24,12 @@ type connectorLogs struct {
2424

2525
// newLogsConnector is a function to create a new connector for logs extraction
2626
func newLogsConnector(set connector.Settings, config component.Config, logsConsumer consumer.Logs) *connectorLogs {
27-
set.TelemetrySettings.Logger.Info("Building otlpjson connector for logs")
27+
set.Logger.Info("Building otlpjson connector for logs")
2828
cfg := config.(*Config)
2929

3030
return &connectorLogs{
3131
config: *cfg,
32-
logger: set.TelemetrySettings.Logger,
32+
logger: set.Logger,
3333
logsConsumer: logsConsumer,
3434
}
3535
}

connector/otlpjsonconnector/metrics.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -25,12 +25,12 @@ type connectorMetrics struct {
2525

2626
// newMetricsConnector is a function to create a new connector for metrics extraction
2727
func newMetricsConnector(set connector.Settings, config component.Config, metricsConsumer consumer.Metrics) *connectorMetrics {
28-
set.TelemetrySettings.Logger.Info("Building otlpjson connector for metrics")
28+
set.Logger.Info("Building otlpjson connector for metrics")
2929
cfg := config.(*Config)
3030

3131
return &connectorMetrics{
3232
config: *cfg,
33-
logger: set.TelemetrySettings.Logger,
33+
logger: set.Logger,
3434
metricsConsumer: metricsConsumer,
3535
}
3636
}

connector/otlpjsonconnector/traces.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -25,12 +25,12 @@ type connectorTraces struct {
2525

2626
// newTracesConnector is a function to create a new connector for traces extraction
2727
func newTracesConnector(set connector.Settings, config component.Config, tracesConsumer consumer.Traces) *connectorTraces {
28-
set.TelemetrySettings.Logger.Info("Building otlpjson connector for traces")
28+
set.Logger.Info("Building otlpjson connector for traces")
2929
cfg := config.(*Config)
3030

3131
return &connectorTraces{
3232
config: *cfg,
33-
logger: set.TelemetrySettings.Logger,
33+
logger: set.Logger,
3434
tracesConsumer: tracesConsumer,
3535
}
3636
}

connector/routingconnector/logs.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ func newLogsConnector(
4949
}
5050

5151
return &logsConnector{
52-
logger: set.TelemetrySettings.Logger,
52+
logger: set.Logger,
5353
config: cfg,
5454
router: r,
5555
}, nil

connector/routingconnector/metrics.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@ func newMetricsConnector(
5050
}
5151

5252
return &metricsConnector{
53-
logger: set.TelemetrySettings.Logger,
53+
logger: set.Logger,
5454
config: cfg,
5555
router: r,
5656
}, nil

connector/routingconnector/traces.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ func newTracesConnector(
4949
}
5050

5151
return &tracesConnector{
52-
logger: set.TelemetrySettings.Logger,
52+
logger: set.Logger,
5353
config: cfg,
5454
router: r,
5555
}, nil

connector/signaltometricsconnector/connector_test.go

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -131,7 +131,7 @@ func TestConnectorWithLogs(t *testing.T) {
131131
func BenchmarkConnectorWithTraces(b *testing.B) {
132132
factory := NewFactory()
133133
settings := connectortest.NewNopSettings(metadata.Type)
134-
settings.TelemetrySettings.Logger = zaptest.NewLogger(b, zaptest.Level(zapcore.DebugLevel))
134+
settings.Logger = zaptest.NewLogger(b, zaptest.Level(zapcore.DebugLevel))
135135
next, err := consumer.NewMetrics(func(context.Context, pmetric.Metrics) error {
136136
return nil
137137
})
@@ -157,7 +157,7 @@ func BenchmarkConnectorWithTraces(b *testing.B) {
157157
func BenchmarkConnectorWithMetrics(b *testing.B) {
158158
factory := NewFactory()
159159
settings := connectortest.NewNopSettings(metadata.Type)
160-
settings.TelemetrySettings.Logger = zaptest.NewLogger(b, zaptest.Level(zapcore.DebugLevel))
160+
settings.Logger = zaptest.NewLogger(b, zaptest.Level(zapcore.DebugLevel))
161161
next, err := consumer.NewMetrics(func(context.Context, pmetric.Metrics) error {
162162
return nil
163163
})
@@ -183,7 +183,7 @@ func BenchmarkConnectorWithMetrics(b *testing.B) {
183183
func BenchmarkConnectorWithLogs(b *testing.B) {
184184
factory := NewFactory()
185185
settings := connectortest.NewNopSettings(metadata.Type)
186-
settings.TelemetrySettings.Logger = zaptest.NewLogger(b, zaptest.Level(zapcore.DebugLevel))
186+
settings.Logger = zaptest.NewLogger(b, zaptest.Level(zapcore.DebugLevel))
187187
next, err := consumer.NewMetrics(func(context.Context, pmetric.Metrics) error {
188188
return nil
189189
})
@@ -289,8 +289,8 @@ func setupConnector(
289289
t.Helper()
290290
factory := NewFactory()
291291
settings := connectortest.NewNopSettings(metadata.Type)
292-
telemetryResource(t).CopyTo(settings.TelemetrySettings.Resource)
293-
settings.TelemetrySettings.Logger = zaptest.NewLogger(t, zaptest.Level(zapcore.DebugLevel))
292+
telemetryResource(t).CopyTo(settings.Resource)
293+
settings.Logger = zaptest.NewLogger(t, zaptest.Level(zapcore.DebugLevel))
294294

295295
cfg := createDefaultConfig()
296296
cm, err := confmaptest.LoadConf(filepath.Join(testFilePath, "config.yaml"))

connector/spanmetricsconnector/connector_test.go

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -163,9 +163,10 @@ func verifyConsumeMetricsInput(tb testing.TB, input pmetric.Metrics, expectedTem
163163
val, ok := rm.Resource().Attributes().Get(serviceNameKey)
164164
require.True(tb, ok)
165165
serviceName := val.AsString()
166-
if serviceName == "service-a" {
166+
switch serviceName {
167+
case "service-a":
167168
numDataPoints = 2
168-
} else if serviceName == "service-b" {
169+
case "service-b":
169170
numDataPoints = 1
170171
}
171172

exporter/alertmanagerexporter/alertmanager_exporter.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -189,7 +189,7 @@ func (s *alertmanagerExporter) pushTraces(ctx context.Context, td ptrace.Traces)
189189
}
190190

191191
func (s *alertmanagerExporter) start(ctx context.Context, host component.Host) error {
192-
client, err := s.config.ClientConfig.ToClient(ctx, host, s.settings)
192+
client, err := s.config.ToClient(ctx, host, s.settings)
193193
if err != nil {
194194
return fmt.Errorf("failed to create HTTP Client: %w", err)
195195
}
@@ -209,7 +209,7 @@ func newAlertManagerExporter(cfg *Config, set component.TelemetrySettings) *aler
209209
config: cfg,
210210
settings: set,
211211
tracesMarshaler: &ptrace.JSONMarshaler{},
212-
endpoint: fmt.Sprintf("%s/api/%s/alerts", cfg.ClientConfig.Endpoint, cfg.APIVersion),
212+
endpoint: fmt.Sprintf("%s/api/%s/alerts", cfg.Endpoint, cfg.APIVersion),
213213
generatorURL: cfg.GeneratorURL,
214214
defaultSeverity: cfg.DefaultSeverity,
215215
severityAttribute: cfg.SeverityAttribute,

exporter/alertmanagerexporter/config.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ var _ component.Config = (*Config)(nil)
3030

3131
// Validate checks if the exporter configuration is valid
3232
func (cfg *Config) Validate() error {
33-
if cfg.ClientConfig.Endpoint == "" {
33+
if cfg.Endpoint == "" {
3434
return errors.New("endpoint must be non-empty")
3535
}
3636
if cfg.DefaultSeverity == "" {

exporter/alertmanagerexporter/config_test.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -112,7 +112,7 @@ func TestConfig_Validate(t *testing.T) {
112112
name: "NoEndpoint",
113113
cfg: func() *Config {
114114
cfg := createDefaultConfig().(*Config)
115-
cfg.ClientConfig.Endpoint = ""
115+
cfg.Endpoint = ""
116116
return cfg
117117
}(),
118118
wantErr: "endpoint must be non-empty",

exporter/awsemfexporter/datapoint.go

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -138,7 +138,7 @@ func (split *dataPointSplit) appendMetricData(metricVal float64, count uint64) {
138138

139139
// CalculateDeltaDatapoints retrieves the NumberDataPoint at the given index and performs rate/delta calculation if necessary.
140140
func (dps numberDataPointSlice) CalculateDeltaDatapoints(i int, instrumentationScopeName string, _ bool, calculators *emfCalculators) ([]dataPoint, bool) {
141-
metric := dps.NumberDataPointSlice.At(i)
141+
metric := dps.At(i)
142142
labels := createLabels(metric.Attributes(), instrumentationScopeName)
143143
timestampMs := unixNanoToMilliseconds(metric.Timestamp())
144144

@@ -177,7 +177,7 @@ func (dps numberDataPointSlice) CalculateDeltaDatapoints(i int, instrumentationS
177177
}
178178

179179
func (dps numberDataPointSlice) IsStaleNaNInf(i int) (bool, pcommon.Map) {
180-
metric := dps.NumberDataPointSlice.At(i)
180+
metric := dps.At(i)
181181
if metric.Flags().NoRecordedValue() {
182182
return true, metric.Attributes()
183183
}
@@ -189,7 +189,7 @@ func (dps numberDataPointSlice) IsStaleNaNInf(i int) (bool, pcommon.Map) {
189189

190190
// CalculateDeltaDatapoints retrieves the HistogramDataPoint at the given index.
191191
func (dps histogramDataPointSlice) CalculateDeltaDatapoints(i int, instrumentationScopeName string, _ bool, _ *emfCalculators) ([]dataPoint, bool) {
192-
metric := dps.HistogramDataPointSlice.At(i)
192+
metric := dps.At(i)
193193
labels := createLabels(metric.Attributes(), instrumentationScopeName)
194194
timestamp := unixNanoToMilliseconds(metric.Timestamp())
195195

@@ -207,7 +207,7 @@ func (dps histogramDataPointSlice) CalculateDeltaDatapoints(i int, instrumentati
207207
}
208208

209209
func (dps histogramDataPointSlice) IsStaleNaNInf(i int) (bool, pcommon.Map) {
210-
metric := dps.HistogramDataPointSlice.At(i)
210+
metric := dps.At(i)
211211
if metric.Flags().NoRecordedValue() {
212212
return true, metric.Attributes()
213213
}
@@ -229,7 +229,7 @@ func (dps histogramDataPointSlice) IsStaleNaNInf(i int) (bool, pcommon.Map) {
229229
// - Sum is only assigned to the first split to ensure the total sum of the datapoints after aggregation is correct.
230230
// - Count is accumulated based on the bucket counts within each split.
231231
func (dps exponentialHistogramDataPointSlice) CalculateDeltaDatapoints(idx int, instrumentationScopeName string, _ bool, _ *emfCalculators) ([]dataPoint, bool) {
232-
metric := dps.ExponentialHistogramDataPointSlice.At(idx)
232+
metric := dps.At(idx)
233233

234234
const splitThreshold = 100
235235
currentBucketIndex := 0
@@ -412,7 +412,7 @@ func collectDatapointsWithNegativeBuckets(split *dataPointSplit, metric pmetric.
412412
}
413413

414414
func (dps exponentialHistogramDataPointSlice) IsStaleNaNInf(i int) (bool, pcommon.Map) {
415-
metric := dps.ExponentialHistogramDataPointSlice.At(i)
415+
metric := dps.At(i)
416416
if metric.Flags().NoRecordedValue() {
417417
return true, metric.Attributes()
418418
}
@@ -430,7 +430,7 @@ func (dps exponentialHistogramDataPointSlice) IsStaleNaNInf(i int) (bool, pcommo
430430

431431
// CalculateDeltaDatapoints retrieves the SummaryDataPoint at the given index and perform calculation with sum and count while retain the quantile value.
432432
func (dps summaryDataPointSlice) CalculateDeltaDatapoints(i int, instrumentationScopeName string, detailedMetrics bool, calculators *emfCalculators) ([]dataPoint, bool) {
433-
metric := dps.SummaryDataPointSlice.At(i)
433+
metric := dps.At(i)
434434
labels := createLabels(metric.Attributes(), instrumentationScopeName)
435435
timestampMs := unixNanoToMilliseconds(metric.Timestamp())
436436

@@ -485,7 +485,7 @@ func (dps summaryDataPointSlice) CalculateDeltaDatapoints(i int, instrumentation
485485
}
486486

487487
func (dps summaryDataPointSlice) IsStaleNaNInf(i int) (bool, pcommon.Map) {
488-
metric := dps.SummaryDataPointSlice.At(i)
488+
metric := dps.At(i)
489489
if metric.Flags().NoRecordedValue() {
490490
return true, metric.Attributes()
491491
}

exporter/awsemfexporter/datapoint_test.go

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -2052,7 +2052,7 @@ func TestGetDataPoints(t *testing.T) {
20522052
expectedDPS := tc.expectedDatapointSlice.(numberDataPointSlice)
20532053
assert.Equal(t, expectedDPS.deltaMetricMetadata, convertedDPS.deltaMetricMetadata)
20542054
assert.Equal(t, 1, convertedDPS.Len())
2055-
dp := convertedDPS.NumberDataPointSlice.At(0)
2055+
dp := convertedDPS.At(0)
20562056
switch dp.ValueType() {
20572057
case pmetric.NumberDataPointValueTypeDouble:
20582058
assert.Equal(t, 0.1, dp.DoubleValue())
@@ -2062,14 +2062,14 @@ func TestGetDataPoints(t *testing.T) {
20622062
assert.Equal(t, tc.expectedAttributes, dp.Attributes().AsRaw())
20632063
case histogramDataPointSlice:
20642064
assert.Equal(t, 1, convertedDPS.Len())
2065-
dp := convertedDPS.HistogramDataPointSlice.At(0)
2065+
dp := convertedDPS.At(0)
20662066
assert.Equal(t, 35.0, dp.Sum())
20672067
assert.Equal(t, uint64(18), dp.Count())
20682068
assert.Equal(t, []float64{0, 10}, dp.ExplicitBounds().AsRaw())
20692069
assert.Equal(t, tc.expectedAttributes, dp.Attributes().AsRaw())
20702070
case exponentialHistogramDataPointSlice:
20712071
assert.Equal(t, 1, convertedDPS.Len())
2072-
dp := convertedDPS.ExponentialHistogramDataPointSlice.At(0)
2072+
dp := convertedDPS.At(0)
20732073
assert.Equal(t, float64(0), dp.Sum())
20742074
assert.Equal(t, uint64(4), dp.Count())
20752075
assert.Equal(t, []uint64{1, 0, 1}, dp.Positive().BucketCounts().AsRaw())
@@ -2080,7 +2080,7 @@ func TestGetDataPoints(t *testing.T) {
20802080
expectedDPS := tc.expectedDatapointSlice.(summaryDataPointSlice)
20812081
assert.Equal(t, expectedDPS.deltaMetricMetadata, convertedDPS.deltaMetricMetadata)
20822082
assert.Equal(t, 1, convertedDPS.Len())
2083-
dp := convertedDPS.SummaryDataPointSlice.At(0)
2083+
dp := convertedDPS.At(0)
20842084
assert.Equal(t, 15.0, dp.Sum())
20852085
assert.Equal(t, uint64(5), dp.Count())
20862086
assert.Equal(t, 2, dp.QuantileValues().Len())

exporter/awsemfexporter/grouped_metric.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,7 @@ func addToGroupedMetric(
8686
}
8787

8888
// Extra params to use when grouping metrics
89-
metadata.groupedMetricMetadata.batchIndex = i
89+
metadata.batchIndex = i
9090
groupKey := aws.NewKey(metadata.groupedMetricMetadata, labels)
9191
if _, ok := groupedMetrics[groupKey]; ok {
9292
// if MetricName already exists in metrics map, print warning log

0 commit comments

Comments
 (0)