
Commit b307c02

ptodev and spartan0x117 authored
Upgrade Agent to Collector 0.87 (grafana#5529)
* Upgrade Agent to Collector 0.87
* Parametrize the OTel version in docs.
* Document another batch processor metric
* Don't accept routing keys for metrics.
* Add tests for otelcol.receiver.kafka

---------

Co-authored-by: Mischa Thompson <[email protected]>
1 parent 4c5ae43 commit b307c02

File tree

28 files changed: +1000 -491 lines changed


CHANGELOG.md

Lines changed: 10 additions & 0 deletions
@@ -65,6 +65,11 @@ Main (unreleased)
 
 - Fix validation issue with ServiceMonitors when scrape timeout is greater than interval. (@captncraig)
 
+- Static mode's spanmetrics processor will now prune histograms when the dimension cache is pruned.
+  The dimension cache was always pruned, but histograms were not, which caused the metric series
+  created by the spanmetrics processor to grow unbounded. Only static mode has this issue; Flow mode's
+  `otelcol.connector.spanmetrics` does not have this bug. (@nijave)
+
 ### Enhancements
 
 - The `loki.write` WAL now has snappy compression enabled by default. (@thepalbi)
@@ -75,6 +80,11 @@ Main (unreleased)
 
 - The `loki.source.docker` component now allows connecting to Docker daemons
   over HTTP(S) and setting up TLS credentials. (@tpaschalis)
+
+- Upgrade OpenTelemetry Collector packages to version 0.87 (@ptodev):
+  - `otelcol.receiver.kafka` has a new `header_extraction` block to extract headers from Kafka records.
+  - `otelcol.receiver.kafka` has a new `version` argument to change the version of
+    the SASL protocol used for SASL authentication.
 
 v0.37.2 (2023-10-16)
 -----------------
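For a concrete picture of the two new `otelcol.receiver.kafka` options listed above, here is a minimal Go sketch built against the component types changed later in this commit (import path inferred from the file path `component/otelcol/receiver/kafka/kafka.go`; broker, topic, credential, and header values are purely illustrative):

package main

import (
	"fmt"

	"github.com/grafana/agent/component/otelcol/receiver/kafka"
	"github.com/grafana/river/rivertypes"
)

func main() {
	var args kafka.Arguments
	args.SetToDefault()

	args.Brokers = []string{"broker:9092"} // illustrative values only
	args.Topic = "otlp_spans"

	// New in this commit: `version` on the sasl block selects the SASL protocol version.
	args.Authentication.SASL = &kafka.SASLArguments{
		Username:  "user",
		Password:  rivertypes.Secret("password"),
		Mechanism: "PLAIN",
		Version:   1,
	}

	// New in this commit: the `header_extraction` block copies selected Kafka
	// record headers onto the telemetry emitted by the receiver.
	args.HeaderExtraction = kafka.HeaderExtraction{
		ExtractHeaders: true,
		Headers:        []string{"tenant"},
	}

	cfg, err := args.Convert() // yields the upstream kafkareceiver.Config
	fmt.Println(cfg, err)
}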

component/otelcol/auth/auth.go

Lines changed: 4 additions & 0 deletions
@@ -139,6 +139,10 @@ func (a *Auth) Update(args component.Arguments) error {
 
 			TracerProvider: a.opts.Tracer,
 			MeterProvider:  metric.NewMeterProvider(metric.WithReader(promExporter)),
+
+			ReportComponentStatus: func(*otelcomponent.StatusEvent) error {
+				return nil
+			},
 		},
 
 		BuildInfo: otelcomponent.BuildInfo{
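The same four-line addition appears in the connector, exporter, extension, and processor wrappers below: each wrapper now fills the ReportComponentStatus field of the Collector's TelemetrySettings with a callback that simply returns nil, since the agent does not surface component status events itself. A minimal sketch of the pattern, assuming the `otelcomponent` alias for `go.opentelemetry.io/collector/component` used in these files (the helper names are hypothetical):

package main

import (
	"fmt"

	otelcomponent "go.opentelemetry.io/collector/component"
)

// noopStatus mirrors the callback added in this commit: status events are
// accepted and discarded.
func noopStatus(*otelcomponent.StatusEvent) error {
	return nil
}

func main() {
	// Hypothetical helper usage: only the status callback is shown here;
	// Logger, TracerProvider, and MeterProvider are wired up as in the diffs.
	settings := otelcomponent.TelemetrySettings{
		ReportComponentStatus: noopStatus,
	}
	fmt.Println(settings.ReportComponentStatus(nil)) // <nil>
}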

component/otelcol/connector/connector.go

Lines changed: 4 additions & 0 deletions
@@ -149,6 +149,10 @@ func (p *Connector) Update(args component.Arguments) error {
 
 			TracerProvider: p.opts.Tracer,
 			MeterProvider:  metric.NewMeterProvider(metric.WithReader(promExporter)),
+
+			ReportComponentStatus: func(*otelcomponent.StatusEvent) error {
+				return nil
+			},
 		},
 
 		BuildInfo: otelcomponent.BuildInfo{

component/otelcol/connector/spanmetrics/types.go

Lines changed: 1 addition & 1 deletion
@@ -37,7 +37,7 @@ const (
 // The unit is a private type in an internal Otel package,
 // so we need to convert it to a map and then back to the internal type.
 // ConvertMetricUnit matches the Unit type in this internal package:
-// https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/v0.85.0/connector/spanmetricsconnector/internal/metrics/unit.go
+// https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/v0.87.0/connector/spanmetricsconnector/internal/metrics/unit.go
 func ConvertMetricUnit(unit string) (map[string]interface{}, error) {
 	switch unit {
 	case MetricsUnitMilliseconds:

component/otelcol/exporter/exporter.go

Lines changed: 4 additions & 0 deletions
@@ -140,6 +140,10 @@ func (e *Exporter) Update(args component.Arguments) error {
 
 			TracerProvider: e.opts.Tracer,
 			MeterProvider:  metric.NewMeterProvider(metricOpts...),
+
+			ReportComponentStatus: func(*otelcomponent.StatusEvent) error {
+				return nil
+			},
 		},
 
 		BuildInfo: otelcomponent.BuildInfo{

component/otelcol/exporter/loadbalancing/loadbalancing.go

Lines changed: 18 additions & 0 deletions
@@ -2,6 +2,7 @@
 package loadbalancing
 
 import (
+	"fmt"
 	"time"
 
 	"github.com/alecthomas/units"
@@ -48,6 +49,7 @@ type Arguments struct {
 var (
 	_ exporter.Arguments = Arguments{}
 	_ river.Defaulter    = &Arguments{}
+	_ river.Validator    = &Arguments{}
 )
 
 var (
@@ -72,6 +74,22 @@ func (args *Arguments) SetToDefault() {
 	*args = DefaultArguments
 }
 
+// Validate implements river.Validator.
+func (args *Arguments) Validate() error {
+	//TODO(ptodev): Add support for "resource" and "metric" routing keys later.
+	// The reason we can't add them yet is that otelcol.exporter.loadbalancing
+	// is labeled as "beta", but those routing keys are experimental.
+	// We need a way to label otelcol.exporter.loadbalancing as "beta"
+	// for logs and traces, but "experimental" for metrics.
+	switch args.RoutingKey {
+	case "service", "traceID":
+		// The routing key is valid.
+	default:
+		return fmt.Errorf("invalid routing key %q", args.RoutingKey)
+	}
+	return nil
+}
+
 // Convert implements exporter.Arguments.
 func (args Arguments) Convert() (otelcomponent.Config, error) {
 	return &loadbalancingexporter.Config{
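A quick sketch of how the new Validate method behaves for the routing key (import path inferred from the file path; only "service" and "traceID" are accepted, matching the commit message's "Don't accept routing keys for metrics"):

package main

import (
	"fmt"

	"github.com/grafana/agent/component/otelcol/exporter/loadbalancing"
)

func main() {
	for _, key := range []string{"service", "traceID", "metric"} {
		var args loadbalancing.Arguments
		args.SetToDefault()
		args.RoutingKey = key
		// "service" and "traceID" return nil; "metric" (and "resource") are
		// rejected until the experimental upstream routing keys are supported.
		fmt.Printf("%-8s -> %v\n", key, args.Validate())
	}
}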

component/otelcol/extension/extension.go

Lines changed: 4 additions & 0 deletions
@@ -116,6 +116,10 @@ func (e *Extension) Update(args component.Arguments) error {
 
 			TracerProvider: e.opts.Tracer,
 			MeterProvider:  metric.NewMeterProvider(metric.WithReader(promExporter)),
+
+			ReportComponentStatus: func(*otelcomponent.StatusEvent) error {
+				return nil
+			},
 		},
 
 		BuildInfo: otelcomponent.BuildInfo{

component/otelcol/processor/processor.go

Lines changed: 4 additions & 0 deletions
@@ -135,6 +135,10 @@ func (p *Processor) Update(args component.Arguments) error {
 
 			TracerProvider: p.opts.Tracer,
 			MeterProvider:  metric.NewMeterProvider(metric.WithReader(promExporter)),
+
+			ReportComponentStatus: func(*otelcomponent.StatusEvent) error {
+				return nil
+			},
 		},
 
 		BuildInfo: otelcomponent.BuildInfo{

component/otelcol/receiver/kafka/kafka.go

Lines changed: 77 additions & 48 deletions
@@ -9,6 +9,7 @@ import (
 	"github.com/grafana/agent/component/otelcol/receiver"
 	otel_service "github.com/grafana/agent/service/otel"
 	"github.com/grafana/river/rivertypes"
+	"github.com/mitchellh/mapstructure"
 	"github.com/open-telemetry/opentelemetry-collector-contrib/exporter/kafkaexporter"
 	"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/kafkareceiver"
 	otelcomponent "go.opentelemetry.io/collector/component"
@@ -38,10 +39,11 @@ type Arguments struct {
 	ClientID      string `river:"client_id,attr,optional"`
 	InitialOffset string `river:"initial_offset,attr,optional"`
 
-	Authentication AuthenticationArguments `river:"authentication,block,optional"`
-	Metadata       MetadataArguments       `river:"metadata,block,optional"`
-	AutoCommit     AutoCommitArguments     `river:"autocommit,block,optional"`
-	MessageMarking MessageMarkingArguments `river:"message_marking,block,optional"`
+	Authentication   AuthenticationArguments `river:"authentication,block,optional"`
+	Metadata         MetadataArguments       `river:"metadata,block,optional"`
+	AutoCommit       AutoCommitArguments     `river:"autocommit,block,optional"`
+	MessageMarking   MessageMarkingArguments `river:"message_marking,block,optional"`
+	HeaderExtraction HeaderExtraction        `river:"header_extraction,block,optional"`
 
 	// DebugMetrics configures component internal metrics. Optional.
 	DebugMetrics otelcol.DebugMetricsArguments `river:"debug_metrics,block,optional"`
@@ -79,6 +81,10 @@ var DefaultArguments = Arguments{
 		AfterExecution:      false,
 		IncludeUnsuccessful: false,
 	},
+	HeaderExtraction: HeaderExtraction{
+		ExtractHeaders: false,
+		Headers:        []string{},
+	},
 }
 
 // SetToDefault implements river.Defaulter.
@@ -88,20 +94,28 @@ func (args *Arguments) SetToDefault() {
 
 // Convert implements receiver.Arguments.
 func (args Arguments) Convert() (otelcomponent.Config, error) {
-	return &kafkareceiver.Config{
-		Brokers:         args.Brokers,
-		ProtocolVersion: args.ProtocolVersion,
-		Topic:           args.Topic,
-		Encoding:        args.Encoding,
-		GroupID:         args.GroupID,
-		ClientID:        args.ClientID,
-		InitialOffset:   args.InitialOffset,
-
-		Authentication: args.Authentication.Convert(),
-		Metadata:       args.Metadata.Convert(),
-		AutoCommit:     args.AutoCommit.Convert(),
-		MessageMarking: args.MessageMarking.Convert(),
-	}, nil
+	input := make(map[string]interface{})
+	input["auth"] = args.Authentication.Convert()
+
+	var result kafkareceiver.Config
+	err := mapstructure.Decode(input, &result)
+	if err != nil {
+		return nil, err
+	}
+
+	result.Brokers = args.Brokers
+	result.ProtocolVersion = args.ProtocolVersion
+	result.Topic = args.Topic
+	result.Encoding = args.Encoding
+	result.GroupID = args.GroupID
+	result.ClientID = args.ClientID
+	result.InitialOffset = args.InitialOffset
+	result.Metadata = args.Metadata.Convert()
+	result.AutoCommit = args.AutoCommit.Convert()
+	result.MessageMarking = args.MessageMarking.Convert()
+	result.HeaderExtraction = args.HeaderExtraction.Convert()
+
+	return &result, nil
 }
 
 // Extensions implements receiver.Arguments.
@@ -128,26 +142,26 @@ type AuthenticationArguments struct {
 }
 
 // Convert converts args into the upstream type.
-func (args AuthenticationArguments) Convert() kafkaexporter.Authentication {
-	var res kafkaexporter.Authentication
+func (args AuthenticationArguments) Convert() map[string]interface{} {
+	auth := make(map[string]interface{})
 
 	if args.Plaintext != nil {
 		conv := args.Plaintext.Convert()
-		res.PlainText = &conv
+		auth["plain_text"] = &conv
 	}
 	if args.SASL != nil {
 		conv := args.SASL.Convert()
-		res.SASL = &conv
+		auth["sasl"] = &conv
 	}
 	if args.TLS != nil {
-		res.TLS = args.TLS.Convert()
+		auth["tls"] = args.TLS.Convert()
 	}
 	if args.Kerberos != nil {
 		conv := args.Kerberos.Convert()
-		res.Kerberos = &conv
+		auth["kerberos"] = &conv
 	}
 
-	return res
+	return auth
 }
 
 // PlaintextArguments configures plaintext authentication against the Kafka
@@ -158,10 +172,10 @@ type PlaintextArguments struct {
 }
 
 // Convert converts args into the upstream type.
-func (args PlaintextArguments) Convert() kafkaexporter.PlainTextConfig {
-	return kafkaexporter.PlainTextConfig{
-		Username: args.Username,
-		Password: string(args.Password),
+func (args PlaintextArguments) Convert() map[string]interface{} {
+	return map[string]interface{}{
+		"username": args.Username,
+		"password": string(args.Password),
 	}
 }
 
@@ -170,16 +184,18 @@ type SASLArguments struct {
 	Username  string            `river:"username,attr"`
 	Password  rivertypes.Secret `river:"password,attr"`
 	Mechanism string            `river:"mechanism,attr"`
+	Version   int               `river:"version,attr,optional"`
 	AWSMSK    AWSMSKArguments   `river:"aws_msk,block,optional"`
 }
 
 // Convert converts args into the upstream type.
-func (args SASLArguments) Convert() kafkaexporter.SASLConfig {
-	return kafkaexporter.SASLConfig{
-		Username:  args.Username,
-		Password:  string(args.Password),
-		Mechanism: args.Mechanism,
-		AWSMSK:    args.AWSMSK.Convert(),
+func (args SASLArguments) Convert() map[string]interface{} {
+	return map[string]interface{}{
+		"username":  args.Username,
+		"password":  string(args.Password),
+		"mechanism": args.Mechanism,
+		"version":   args.Version,
+		"aws_msk":   args.AWSMSK.Convert(),
 	}
 }
 
@@ -191,10 +207,10 @@ type AWSMSKArguments struct {
 }
 
 // Convert converts args into the upstream type.
-func (args AWSMSKArguments) Convert() kafkaexporter.AWSMSKConfig {
-	return kafkaexporter.AWSMSKConfig{
-		Region:     args.Region,
-		BrokerAddr: args.BrokerAddr,
+func (args AWSMSKArguments) Convert() map[string]interface{} {
+	return map[string]interface{}{
+		"region":      args.Region,
+		"broker_addr": args.BrokerAddr,
	}
 }
 
@@ -211,15 +227,15 @@ type KerberosArguments struct {
 }
 
 // Convert converts args into the upstream type.
-func (args KerberosArguments) Convert() kafkaexporter.KerberosConfig {
-	return kafkaexporter.KerberosConfig{
-		ServiceName: args.ServiceName,
-		Realm:       args.Realm,
-		UseKeyTab:   args.UseKeyTab,
-		Username:    args.Username,
-		Password:    string(args.Password),
-		ConfigPath:  args.ConfigPath,
-		KeyTabPath:  args.KeyTabPath,
+func (args KerberosArguments) Convert() map[string]interface{} {
+	return map[string]interface{}{
+		"service_name": args.ServiceName,
+		"realm":        args.Realm,
+		"use_keytab":   args.UseKeyTab,
+		"username":     args.Username,
+		"password":     string(args.Password),
+		"config_file":  args.ConfigPath,
+		"keytab_file":  args.KeyTabPath,
 	}
 }
 
@@ -283,6 +299,19 @@ func (args MessageMarkingArguments) Convert() kafkareceiver.MessageMarking {
 	}
 }
 
+type HeaderExtraction struct {
+	ExtractHeaders bool     `river:"extract_headers,attr,optional"`
+	Headers        []string `river:"headers,attr,optional"`
+}
+
+// Convert converts HeaderExtraction into the upstream type.
+func (h HeaderExtraction) Convert() kafkareceiver.HeaderExtraction {
+	return kafkareceiver.HeaderExtraction{
+		ExtractHeaders: h.ExtractHeaders,
+		Headers:        h.Headers,
+	}
+}
+
 // DebugMetricsConfig implements receiver.Arguments.
 func (args Arguments) DebugMetricsConfig() otelcol.DebugMetricsArguments {
 	return args.DebugMetrics
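The reworked Convert above no longer builds the kafkaexporter authentication structs directly; it puts the authentication settings into a plain map and lets mapstructure populate kafkareceiver.Config, so the map keys ("sasl", "version", "config_file", and so on) need to line up with the upstream field tags. A self-contained sketch of that decode pattern, using stand-in config structs rather than the real upstream ones:

package main

import (
	"fmt"

	"github.com/mitchellh/mapstructure"
)

// Stand-ins for the upstream config: fields are matched to map keys via
// `mapstructure` tags, just as kafkareceiver.Config matches keys like
// "auth", "sasl", and "version".
type saslConfig struct {
	Username  string `mapstructure:"username"`
	Mechanism string `mapstructure:"mechanism"`
	Version   int    `mapstructure:"version"`
}

type authConfig struct {
	SASL *saslConfig `mapstructure:"sasl"`
}

type fakeConfig struct {
	Auth authConfig `mapstructure:"auth"`
}

func main() {
	// Build a nested map keyed by the tag names, as the component's
	// AuthenticationArguments.Convert now does.
	input := map[string]interface{}{
		"auth": map[string]interface{}{
			"sasl": map[string]interface{}{
				"username":  "user",
				"mechanism": "PLAIN",
				"version":   1,
			},
		},
	}

	var result fakeConfig
	if err := mapstructure.Decode(input, &result); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", *result.Auth.SASL) // {Username:user Mechanism:PLAIN Version:1}
}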

0 commit comments
