Merge branch 'main' into fix-otel-processor-filter-doc
tpaschalis authored Jan 3, 2024
2 parents 6ddad59 + b1dd499 commit 30ead4c
Showing 48 changed files with 971 additions and 264 deletions.
17 changes: 17 additions & 0 deletions CHANGELOG.md
@@ -21,6 +21,11 @@ Main (unreleased)
- The `target` block in `prometheus.exporter.blackbox` requires a mandatory `name`
argument instead of a block label. (@hainenber)
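
  A minimal River sketch of the new shape; the module file and target values are illustrative:

  ```river
  prometheus.exporter.blackbox "example" {
    config_file = "blackbox.yml"

    target {
      name    = "example_website" // previously supplied as the block label
      address = "http://example.com"
      module  = "http_2xx"
    }
  }
  ```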

- In the Azure exporter, dimension options will no longer be validated by the Azure API. (@kgeckhart)
  - This change will not break any existing configurations, and you can opt in to validation via the `validate_dimensions` configuration option.
  - Before this change, pulling metrics for Azure resources with variable dimensions required one configuration per metric-and-dimension combination to avoid errors.
  - After this change, you can include all metrics and dimensions in a single configuration, and the Azure APIs will return only the dimensions that are valid for each metric.
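
  A minimal River sketch of opting back in to API-side validation; the subscription ID, resource type, and metric names are illustrative:

  ```river
  prometheus.exporter.azure "example" {
    subscriptions       = ["<subscription_id>"]
    resource_type       = "Microsoft.Storage/storageAccounts"
    metrics             = ["Availability", "Transactions"]
    validate_dimensions = true // restore pre-change validation through the Azure API
  }
  ```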

### Enhancements

- Flow Windows service: Support environment variables. (@jkroepke)
@@ -38,6 +43,8 @@ Main (unreleased)
Previously, only `remote.*` and `local.*` components could be referenced
without a circular dependency. (@rfratto)

- Add support for Basic Auth-secured connections to Elasticsearch clusters via `prometheus.exporter.elasticsearch`. (@hainenber)
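
  A minimal River sketch, mirroring the syntax exercised in the tests further down this diff; the address and credentials are illustrative:

  ```river
  prometheus.exporter.elasticsearch "example" {
    address = "http://localhost:9200"

    basic_auth {
      username = "username"
      password = "pass"
    }
  }
  ```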

- Add a `resource_to_telemetry_conversion` argument to `otelcol.exporter.prometheus`
for converting resource attributes to Prometheus labels. (@hainenber)
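
  A sketch of wiring up the new argument, assuming a downstream `prometheus.remote_write.default` component exists:

  ```river
  otelcol.exporter.prometheus "default" {
    // Convert resource attributes on incoming OTLP metrics to Prometheus labels.
    resource_to_telemetry_conversion = true
    forward_to                       = [prometheus.remote_write.default.receiver]
  }
  ```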

@@ -63,6 +70,14 @@ Main (unreleased)

- Added the 'country' mmdb-type to the geoip log pipeline stage. (@superstes)
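
  In Flow terms, a sketch of the equivalent `loki.process` stage; the database path and source label are illustrative:

  ```river
  loki.process "geoip" {
    forward_to = [loki.write.default.receiver]

    stage.geoip {
      source  = "ip"
      db      = "/etc/geoip/GeoLite2-Country.mmdb"
      db_type = "country" // the newly supported mmdb type
    }
  }
  ```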

- Azure exporter enhancements for Flow and static mode. (@kgeckhart)
  - Allows pulling metrics at the Azure subscription level instead of resource by resource.
  - Disables dimension validation by default to reduce the number of exporter instances needed for full dimension coverage.
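
  A hedged sketch combining both enhancements, assuming the new `regions` attribute (added in this diff) is what enables subscription-level collection; all values are illustrative:

  ```river
  prometheus.exporter.azure "subscription_scope" {
    subscriptions = ["<subscription_id>"]
    resource_type = "Microsoft.Storage/storageAccounts"
    metrics       = ["Availability"]
    // Listing regions queries metrics per region at the subscription level
    // instead of enumerating resources one by one.
    regions = ["eastus", "westus"]
  }
  ```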

- Add `max_cache_size` to `prometheus.relabel` to make the cache size configurable instead of hard-coded at 100,000. (@mattdurham)
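
  For example, a sketch with an enlarged cache; the rule shown is illustrative:

  ```river
  prometheus.relabel "example" {
    forward_to     = [prometheus.remote_write.default.receiver]
    max_cache_size = 250000 // defaults to 100,000 when omitted

    rule {
      source_labels = ["__name__"]
      regex         = "tmp_.*"
      action        = "drop"
    }
  }
  ```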

- Add support for `http_sd_config` within a `scrape_config` for Prometheus-to-Flow config conversion. (@erikbaranowski)
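
  As a sketch of the Flow output side, an `http_sd_config` plausibly maps onto a `discovery.http` component feeding `prometheus.scrape`; the labels and URL are illustrative:

  ```river
  discovery.http "example" {
    url = "http://sd.example.com/targets"
  }

  prometheus.scrape "example" {
    targets    = discovery.http.example.targets
    forward_to = [prometheus.remote_write.default.receiver]
  }
  ```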

### Bugfixes

- Update `pyroscope.ebpf` to fix a logic bug that caused it to profile too many kthreads instead of regular processes. https://github.com/grafana/pyroscope/pull/2778 (@korniltsev)
@@ -83,6 +98,8 @@ Main (unreleased)

- Bump github.com/IBM/sarama from v1.41.2 to v1.42.1

- Attach a unique Agent ID header to remote-write requests. (@captncraig)

v0.38.1 (2023-11-30)
--------------------

2 changes: 2 additions & 0 deletions cmd/grafana-agent/entrypoint.go
@@ -17,6 +17,7 @@ import (
"github.com/go-kit/log"
"github.com/go-kit/log/level"
"github.com/gorilla/mux"
"github.com/grafana/agent/internal/agentseed"
"github.com/grafana/agent/pkg/config"
"github.com/grafana/agent/pkg/logs"
"github.com/grafana/agent/pkg/metrics"
@@ -98,6 +99,7 @@ func NewEntrypoint(logger *server.Logger, cfg *config.Config, reloader Reloader)
return nil, err
}

agentseed.Init("", logger)
ep.reporter, err = usagestats.NewReporter(logger)
if err != nil {
return nil, err
2 changes: 2 additions & 0 deletions cmd/internal/flowmode/cmd_run.go
@@ -18,6 +18,7 @@ import (
"github.com/grafana/agent/component"
"github.com/grafana/agent/converter"
convert_diag "github.com/grafana/agent/converter/diag"
"github.com/grafana/agent/internal/agentseed"
"github.com/grafana/agent/pkg/boringcrypto"
"github.com/grafana/agent/pkg/config/instrumentation"
"github.com/grafana/agent/pkg/flow"
@@ -248,6 +249,7 @@ func (fr *flowRun) Run(configPath string) error {
}

labelService := labelstore.New(l, reg)
agentseed.Init(fr.storagePath, l)

f := flow.New(flow.Options{
Logger: l,
8 changes: 8 additions & 0 deletions component/loki/write/write.go
@@ -12,6 +12,7 @@ import (
"github.com/grafana/agent/component/common/loki/client"
"github.com/grafana/agent/component/common/loki/limit"
"github.com/grafana/agent/component/common/loki/wal"
"github.com/grafana/agent/internal/agentseed"
)

func init() {
@@ -159,6 +160,13 @@ func (c *Component) Update(args component.Arguments) error {
}

cfgs := newArgs.convertClientConfigs()
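// Attach this agent's unique ID as a header on every outgoing client config.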
uid := agentseed.Get().UID
for _, cfg := range cfgs {
if cfg.Headers == nil {
cfg.Headers = map[string]string{}
}
cfg.Headers[agentseed.HeaderName] = uid
}
walCfg := wal.Config{
Enabled: newArgs.WAL.Enabled,
MaxSegmentAge: newArgs.WAL.MaxSegmentAge,
6 changes: 4 additions & 2 deletions component/otelcol/config_filter_test.go
@@ -3,11 +3,13 @@ package otelcol_test
import (
"testing"

"k8s.io/utils/ptr"

"github.com/grafana/agent/component/otelcol"

"github.com/grafana/river"
"github.com/stretchr/testify/require"
"go.opentelemetry.io/collector/pdata/plog"
"k8s.io/utils/pointer"
)

func TestConvertMatchProperties(t *testing.T) {
@@ -49,7 +51,7 @@ func TestConvertMatchProperties(t *testing.T) {
Libraries: []otelcol.InstrumentationLibrary{
{
Name: "mongo-java-driver",
Version: pointer.String("3.8.0"),
Version: ptr.To("3.8.0"),
},
},
SpanKinds: []string{"span1"},
8 changes: 8 additions & 0 deletions component/prometheus/exporter/azure/azure.go
@@ -35,6 +35,8 @@ type Arguments struct {
MetricNameTemplate string `river:"metric_name_template,attr,optional"`
MetricHelpTemplate string `river:"metric_help_template,attr,optional"`
AzureCloudEnvironment string `river:"azure_cloud_environment,attr,optional"`
ValidateDimensions bool `river:"validate_dimensions,attr,optional"`
Regions []string `river:"regions,attr,optional"`
}

var DefaultArguments = Arguments{
@@ -43,6 +45,10 @@ var DefaultArguments = Arguments{
MetricHelpTemplate: "Azure metric {metric} for {type} with aggregation {aggregation} as {unit}",
IncludedResourceTags: []string{"owner"},
AzureCloudEnvironment: "azurecloud",
// Dimensions do not always apply to all metrics for a service, which previously required configuring multiple
// exporters to fully monitor a service, which is tedious. Turning off validation eliminates this complexity;
// the underlying SDK only returns the dimensions that are valid for particular metrics.
ValidateDimensions: false,
}

// SetToDefault implements river.Defaulter.
@@ -72,5 +78,7 @@ func (a *Arguments) Convert() *azure_exporter.Config {
MetricNameTemplate: a.MetricNameTemplate,
MetricHelpTemplate: a.MetricHelpTemplate,
AzureCloudEnvironment: a.AzureCloudEnvironment,
ValidateDimensions: a.ValidateDimensions,
Regions: a.Regions,
}
}
37 changes: 20 additions & 17 deletions component/prometheus/exporter/elasticsearch/elasticsearch.go
@@ -4,6 +4,7 @@ import (
"time"

"github.com/grafana/agent/component"
commonCfg "github.com/grafana/agent/component/common/config"
"github.com/grafana/agent/component/prometheus/exporter"
"github.com/grafana/agent/pkg/integrations"
"github.com/grafana/agent/pkg/integrations/elasticsearch_exporter"
@@ -35,23 +36,24 @@ var DefaultArguments = Arguments{
}

type Arguments struct {
Address string `river:"address,attr,optional"`
Timeout time.Duration `river:"timeout,attr,optional"`
AllNodes bool `river:"all,attr,optional"`
Node string `river:"node,attr,optional"`
ExportIndices bool `river:"indices,attr,optional"`
ExportIndicesSettings bool `river:"indices_settings,attr,optional"`
ExportClusterSettings bool `river:"cluster_settings,attr,optional"`
ExportShards bool `river:"shards,attr,optional"`
IncludeAliases bool `river:"aliases,attr,optional"`
ExportSnapshots bool `river:"snapshots,attr,optional"`
ExportClusterInfoInterval time.Duration `river:"clusterinfo_interval,attr,optional"`
CA string `river:"ca,attr,optional"`
ClientPrivateKey string `river:"client_private_key,attr,optional"`
ClientCert string `river:"client_cert,attr,optional"`
InsecureSkipVerify bool `river:"ssl_skip_verify,attr,optional"`
ExportDataStreams bool `river:"data_stream,attr,optional"`
ExportSLM bool `river:"slm,attr,optional"`
Address string `river:"address,attr,optional"`
Timeout time.Duration `river:"timeout,attr,optional"`
AllNodes bool `river:"all,attr,optional"`
Node string `river:"node,attr,optional"`
ExportIndices bool `river:"indices,attr,optional"`
ExportIndicesSettings bool `river:"indices_settings,attr,optional"`
ExportClusterSettings bool `river:"cluster_settings,attr,optional"`
ExportShards bool `river:"shards,attr,optional"`
IncludeAliases bool `river:"aliases,attr,optional"`
ExportSnapshots bool `river:"snapshots,attr,optional"`
ExportClusterInfoInterval time.Duration `river:"clusterinfo_interval,attr,optional"`
CA string `river:"ca,attr,optional"`
ClientPrivateKey string `river:"client_private_key,attr,optional"`
ClientCert string `river:"client_cert,attr,optional"`
InsecureSkipVerify bool `river:"ssl_skip_verify,attr,optional"`
ExportDataStreams bool `river:"data_stream,attr,optional"`
ExportSLM bool `river:"slm,attr,optional"`
BasicAuth *commonCfg.BasicAuth `river:"basic_auth,block,optional"`
}

// SetToDefault implements river.Defaulter.
@@ -78,5 +80,6 @@ func (a *Arguments) Convert() *elasticsearch_exporter.Config {
InsecureSkipVerify: a.InsecureSkipVerify,
ExportDataStreams: a.ExportDataStreams,
ExportSLM: a.ExportSLM,
BasicAuth: a.BasicAuth.Convert(),
}
}
19 changes: 19 additions & 0 deletions component/prometheus/exporter/elasticsearch/elasticsearch_test.go
@@ -4,8 +4,11 @@ import (
"testing"
"time"

commonCfg "github.com/grafana/agent/component/common/config"
"github.com/grafana/agent/pkg/integrations/elasticsearch_exporter"
"github.com/grafana/river"
"github.com/grafana/river/rivertypes"
promCfg "github.com/prometheus/common/config"
"github.com/stretchr/testify/require"
)

@@ -27,6 +30,10 @@ func TestRiverUnmarshal(t *testing.T) {
ssl_skip_verify = true
data_stream = true
slm = true
basic_auth {
username = "username"
password = "pass"
}
`

var args Arguments
@@ -50,6 +57,10 @@
InsecureSkipVerify: true,
ExportDataStreams: true,
ExportSLM: true,
BasicAuth: &commonCfg.BasicAuth{
Username: "username",
Password: rivertypes.Secret("pass"),
},
}

require.Equal(t, expected, args)
@@ -73,6 +84,10 @@
ssl_skip_verify = true
data_stream = true
slm = true
basic_auth {
username = "username"
password = "pass"
}
`
var args Arguments
err := river.Unmarshal([]byte(riverConfig), &args)
@@ -97,6 +112,10 @@
InsecureSkipVerify: true,
ExportDataStreams: true,
ExportSLM: true,
BasicAuth: &promCfg.BasicAuth{
Username: "username",
Password: promCfg.Secret("pass"),
},
}
require.Equal(t, expected, *res)
}
20 changes: 14 additions & 6 deletions component/prometheus/relabel/relabel.go
@@ -46,15 +46,23 @@ type Arguments struct {
MetricRelabelConfigs []*flow_relabel.Config `river:"rule,block,optional"`

// Cache size to use for LRU cache.
//CacheSize int `river:"cache_size,attr,optional"`
CacheSize int `river:"max_cache_size,attr,optional"`
}

// SetToDefault implements river.Defaulter.
/*func (arg *Arguments) SetToDefault() {
func (arg *Arguments) SetToDefault() {
*arg = Arguments{
CacheSize: 500_000,
CacheSize: 100_000,
}
}*/
}

// Validate implements river.Validator.
func (arg *Arguments) Validate() error {
if arg.CacheSize <= 0 {
return fmt.Errorf("max_cache_size must be greater than 0, got %d", arg.CacheSize)
}
return nil
}

// Exports holds values which are exported by the prometheus.relabel component.
type Exports struct {
@@ -88,7 +96,7 @@ var (

// New creates a new prometheus.relabel component.
func New(o component.Options, args Arguments) (*Component, error) {
cache, err := lru.New[uint64, *labelAndID](100_000)
cache, err := lru.New[uint64, *labelAndID](args.CacheSize)
if err != nil {
return nil, err
}
@@ -210,7 +218,7 @@ func (c *Component) Update(args component.Arguments) error {
defer c.mut.Unlock()

newArgs := args.(Arguments)
c.clearCache(100_000)
c.clearCache(newArgs.CacheSize)
c.mrc = flow_relabel.ComponentToPromRelabelConfigs(newArgs.MetricRelabelConfigs)
c.fanout.UpdateChildren(newArgs.ForwardTo)

14 changes: 13 additions & 1 deletion component/prometheus/relabel/relabel_test.go
@@ -44,11 +44,22 @@ func TestUpdateReset(t *testing.T) {
relabeller.relabel(0, lbls)
require.True(t, relabeller.cache.Len() == 1)
_ = relabeller.Update(Arguments{
CacheSize: 100000,
MetricRelabelConfigs: []*flow_relabel.Config{},
})
require.True(t, relabeller.cache.Len() == 0)
}

func TestValidator(t *testing.T) {
args := Arguments{CacheSize: 0}
err := args.Validate()
require.Error(t, err)

args.CacheSize = 1
err = args.Validate()
require.NoError(t, err)
}

func TestNil(t *testing.T) {
ls := labelstore.New(nil, prom.DefaultRegisterer)
fanout := prometheus.NewInterceptor(nil, ls, prometheus.WithAppendHook(func(ref storage.SeriesRef, _ labels.Labels, _ int64, _ float64, _ storage.Appender) (storage.SeriesRef, error) {
@@ -72,6 +83,7 @@ func TestNil(t *testing.T) {
Action: "drop",
},
},
CacheSize: 100000,
})
require.NotNil(t, relabeller)
require.NoError(t, err)
@@ -129,7 +141,6 @@ func BenchmarkCache(b *testing.B) {

lbls := labels.FromStrings("__address__", "localhost")
app := entry.Appender(context.Background())

for i := 0; i < b.N; i++ {
app.Append(0, lbls, time.Now().UnixMilli(), 0)
}
@@ -161,6 +172,7 @@ func generateRelabel(t *testing.T) *Component {
Action: "replace",
},
},
CacheSize: 100_000,
})
require.NotNil(t, relabeller)
require.NoError(t, err)
8 changes: 8 additions & 0 deletions component/prometheus/remotewrite/remote_write.go
@@ -21,6 +21,7 @@

"github.com/go-kit/log"
"github.com/grafana/agent/component"
"github.com/grafana/agent/internal/agentseed"
"github.com/grafana/agent/internal/useragent"
"github.com/grafana/agent/pkg/flow/logging/level"
"github.com/grafana/agent/pkg/metrics/wal"
@@ -257,6 +258,13 @@ func (c *Component) Update(newConfig component.Arguments) error {
if err != nil {
return err
}
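// Stamp each remote-write config with the agent's unique ID header.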
uid := agentseed.Get().UID
for _, cfg := range convertedConfig.RemoteWriteConfigs {
if cfg.Headers == nil {
cfg.Headers = map[string]string{}
}
cfg.Headers[agentseed.HeaderName] = uid
}
err = c.remoteStore.ApplyConfig(convertedConfig)
if err != nil {
return err
1 change: 0 additions & 1 deletion component/prometheus/remotewrite/types.go
@@ -231,7 +231,6 @@ func convertConfigs(cfg Arguments) (*config.Config, error) {
if err != nil {
return nil, fmt.Errorf("cannot parse remote_write url %q: %w", rw.URL, err)
}

rwConfigs = append(rwConfigs, &config.RemoteWriteConfig{
URL: &common.URL{URL: parsedURL},
RemoteTimeout: model.Duration(rw.RemoteTimeout),