diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3b83ee4d268c..a86d9b1960f6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -14,8 +14,8 @@ Main (unreleased)
 
 - `otelcol.receiver.prometheus` will drop all `otel_scope_info` metrics when converting them to OTLP. (@wildum)
   - If the `otel_scope_info` metric has labels `otel_scope_name` and `otel_scope_version`,
-    their values will be used to set OTLP Instrumentation Scope name and version respectively. 
-  - Labels of `otel_scope_info` metrics other than `otel_scope_name` and `otel_scope_version` 
+    their values will be used to set OTLP Instrumentation Scope name and version respectively.
+  - Labels of `otel_scope_info` metrics other than `otel_scope_name` and `otel_scope_version`
     are added as scope attributes with the matching name and version.
 
 - The `target` block in `prometheus.exporter.blackbox` requires a mandatory `name`
@@ -57,7 +57,7 @@ Main (unreleased)
 
 - Added links between compatible components in the documentation to make it easier to discover them. (@thampiotr)
- 
+
 - Allow defining `HTTPClientConfig` for `discovery.ec2`. (@cmbrad)
 
 - The `remote.http` component can optionally define a request body. (@tpaschalis)
@@ -71,8 +71,8 @@ Main (unreleased)
 - Added 'country' mmdb-type to log pipeline-stage geoip. (@superstes)
 
 - Azure exporter enhancements for flow and static mode, (@kgeckhart)
-  - Allows for pulling metrics at the Azure subscription level instead of resource by resource 
-  - Disable dimension validation by default to reduce the number of exporter instances needed for full dimension coverage 
+  - Allows for pulling metrics at the Azure subscription level instead of resource by resource
+  - Disable dimension validation by default to reduce the number of exporter instances needed for full dimension coverage
 
 - Add `max_cache_size` to `prometheus.relabel` to allow configurability instead of hard coded 100,000. (@mattdurham)
 
@@ -81,11 +81,11 @@ Main (unreleased)
 ### Bugfixes
 
 - Update `pyroscope.ebpf` to fix a logical bug causing to profile to many kthreads instead of regular processes https://github.com/grafana/pyroscope/pull/2778 (@korniltsev)
- 
+
 - Update `pyroscope.ebpf` to produce more optimal pprof profiles for python processes https://github.com/grafana/pyroscope/pull/2788 (@korniltsev)
 
 - In Static mode's `traces` subsystem, `spanmetrics` used to be generated prior to load balancing.
-  This could lead to inaccurate metrics. This issue only affects Agents using both `spanmetrics` and 
+  This could lead to inaccurate metrics. This issue only affects Agents using both `spanmetrics` and
   `load_balancing`, when running in a load balanced cluster with more than one Agent instance. (@ptodev)
 
 - Fixes `loki.source.docker` a behavior that synced an incomplete list of targets to the tailer manager. (@FerdinandvHagen)
@@ -94,6 +94,8 @@ Main (unreleased)
 
 - Add staleness tracking to labelstore to reduce memory usage. (@mattdurham)
 
+- Fix issue where `prometheus.exporter.kafka` would crash when configuring `sasl_password`. (@rfratto)
+
 ### Other changes
 
 - Bump github.com/IBM/sarama from v1.41.2 to v1.42.1
diff --git a/component/prometheus/exporter/kafka/kafka.go b/component/prometheus/exporter/kafka/kafka.go
index f146c40bae3c..f68985b50d2c 100644
--- a/component/prometheus/exporter/kafka/kafka.go
+++ b/component/prometheus/exporter/kafka/kafka.go
@@ -9,7 +9,8 @@ import (
     "github.com/grafana/agent/component/prometheus/exporter"
     "github.com/grafana/agent/pkg/integrations"
     "github.com/grafana/agent/pkg/integrations/kafka_exporter"
-    config_util "github.com/prometheus/common/config"
+    "github.com/grafana/river/rivertypes"
+    "github.com/prometheus/common/config"
 )
 
 var DefaultArguments = Arguments{
@@ -24,28 +25,28 @@ var DefaultArguments = Arguments{
 }
 
 type Arguments struct {
-    Instance                string             `river:"instance,attr,optional"`
-    KafkaURIs               []string           `river:"kafka_uris,attr,optional"`
-    UseSASL                 bool               `river:"use_sasl,attr,optional"`
-    UseSASLHandshake        bool               `river:"use_sasl_handshake,attr,optional"`
-    SASLUsername            string             `river:"sasl_username,attr,optional"`
-    SASLPassword            config_util.Secret `river:"sasl_password,attr,optional"`
-    SASLMechanism           string             `river:"sasl_mechanism,attr,optional"`
-    UseTLS                  bool               `river:"use_tls,attr,optional"`
-    CAFile                  string             `river:"ca_file,attr,optional"`
-    CertFile                string             `river:"cert_file,attr,optional"`
-    KeyFile                 string             `river:"key_file,attr,optional"`
-    InsecureSkipVerify      bool               `river:"insecure_skip_verify,attr,optional"`
-    KafkaVersion            string             `river:"kafka_version,attr,optional"`
-    UseZooKeeperLag         bool               `river:"use_zookeeper_lag,attr,optional"`
-    ZookeeperURIs           []string           `river:"zookeeper_uris,attr,optional"`
-    ClusterName             string             `river:"kafka_cluster_name,attr,optional"`
-    MetadataRefreshInterval string             `river:"metadata_refresh_interval,attr,optional"`
-    AllowConcurrent         bool               `river:"allow_concurrency,attr,optional"`
-    MaxOffsets              int                `river:"max_offsets,attr,optional"`
-    PruneIntervalSeconds    int                `river:"prune_interval_seconds,attr,optional"`
-    TopicsFilter            string             `river:"topics_filter_regex,attr,optional"`
-    GroupFilter             string             `river:"groups_filter_regex,attr,optional"`
+    Instance                string            `river:"instance,attr,optional"`
+    KafkaURIs               []string          `river:"kafka_uris,attr,optional"`
+    UseSASL                 bool              `river:"use_sasl,attr,optional"`
+    UseSASLHandshake        bool              `river:"use_sasl_handshake,attr,optional"`
+    SASLUsername            string            `river:"sasl_username,attr,optional"`
+    SASLPassword            rivertypes.Secret `river:"sasl_password,attr,optional"`
+    SASLMechanism           string            `river:"sasl_mechanism,attr,optional"`
+    UseTLS                  bool              `river:"use_tls,attr,optional"`
+    CAFile                  string            `river:"ca_file,attr,optional"`
+    CertFile                string            `river:"cert_file,attr,optional"`
+    KeyFile                 string            `river:"key_file,attr,optional"`
+    InsecureSkipVerify      bool              `river:"insecure_skip_verify,attr,optional"`
+    KafkaVersion            string            `river:"kafka_version,attr,optional"`
+    UseZooKeeperLag         bool              `river:"use_zookeeper_lag,attr,optional"`
+    ZookeeperURIs           []string          `river:"zookeeper_uris,attr,optional"`
+    ClusterName             string            `river:"kafka_cluster_name,attr,optional"`
+    MetadataRefreshInterval string            `river:"metadata_refresh_interval,attr,optional"`
+    AllowConcurrent         bool              `river:"allow_concurrency,attr,optional"`
+    MaxOffsets              int               `river:"max_offsets,attr,optional"`
+    PruneIntervalSeconds    int               `river:"prune_interval_seconds,attr,optional"`
+    TopicsFilter            string            `river:"topics_filter_regex,attr,optional"`
+    GroupFilter             string            `river:"groups_filter_regex,attr,optional"`
 }
 
 func init() {
@@ -93,7 +94,7 @@ func (a *Arguments) Convert() *kafka_exporter.Config {
         UseSASL:                 a.UseSASL,
         UseSASLHandshake:        a.UseSASLHandshake,
         SASLUsername:            a.SASLUsername,
-        SASLPassword:            a.SASLPassword,
+        SASLPassword:            config.Secret(a.SASLPassword),
         SASLMechanism:           a.SASLMechanism,
         UseTLS:                  a.UseTLS,
         CAFile:                  a.CAFile,
diff --git a/component/prometheus/exporter/kafka/kafka_test.go b/component/prometheus/exporter/kafka/kafka_test.go
index 26f321dc39fa..4209da21cb7d 100644
--- a/component/prometheus/exporter/kafka/kafka_test.go
+++ b/component/prometheus/exporter/kafka/kafka_test.go
@@ -107,3 +107,15 @@ func TestCustomizeTarget(t *testing.T) {
     require.Equal(t, 1, len(newTargets))
     require.Equal(t, "example", newTargets[0]["instance"])
 }
+
+func TestSASLPassword(t *testing.T) { // #6044
+    var exampleRiverConfig = `
+    kafka_uris = ["broker1"]
+    use_sasl = true
+    sasl_password = "foobar"
+    `
+
+    var args Arguments
+    err := river.Unmarshal([]byte(exampleRiverConfig), &args)
+    require.NoError(t, err)
+}
diff --git a/converter/internal/staticconvert/internal/build/kafka_exporter.go b/converter/internal/staticconvert/internal/build/kafka_exporter.go
index 25310e35a5f4..16be4275ddce 100644
--- a/converter/internal/staticconvert/internal/build/kafka_exporter.go
+++ b/converter/internal/staticconvert/internal/build/kafka_exporter.go
@@ -4,6 +4,7 @@ import (
     "github.com/grafana/agent/component/discovery"
     "github.com/grafana/agent/component/prometheus/exporter/kafka"
     "github.com/grafana/agent/pkg/integrations/kafka_exporter"
+    "github.com/grafana/river/rivertypes"
 )
 
 func (b *IntegrationsConfigBuilder) appendKafkaExporter(config *kafka_exporter.Config, instanceKey *string) discovery.Exports {
@@ -17,7 +18,7 @@ func toKafkaExporter(config *kafka_exporter.Config) *kafka.Arguments {
         UseSASL:                 config.UseSASL,
         UseSASLHandshake:        config.UseSASLHandshake,
         SASLUsername:            config.SASLUsername,
-        SASLPassword:            config.SASLPassword,
+        SASLPassword:            rivertypes.Secret(config.SASLPassword),
         SASLMechanism:           config.SASLMechanism,
         UseTLS:                  config.UseTLS,
         CAFile:                  config.CAFile,
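
Note: a minimal Flow configuration sketch that exercises the fixed attribute, mirroring the regression test above. The broker address, username, and password are illustrative placeholders, not values taken from this change:

    prometheus.exporter.kafka "example" {
      kafka_uris    = ["broker1:9092"]  // placeholder broker address
      use_sasl      = true
      sasl_username = "user"            // placeholder credentials
      sasl_password = "foobar"
    }

Previously `sasl_password` was declared as the Prometheus `config_util.Secret` type; this change declares it as River's native `rivertypes.Secret` so River can unmarshal the attribute, and `Convert` casts it back to `config.Secret` at the boundary with the upstream `kafka_exporter` integration, leaving the exporter's behavior unchanged.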