From e70cc3d51b249bb682e944c216e4aff13445acc3 Mon Sep 17 00:00:00 2001 From: Michael Burt Date: Mon, 21 Oct 2024 21:59:25 -0600 Subject: [PATCH] [receiver/tlscheck] Implement Scraper --- ...lasticsearchexporter_compression-gzip.yaml | 27 ++++ .../tlscheckreceiver-implementation.yaml | 27 ++++ .golangci.yml | 1 + Makefile | 8 +- exporter/elasticsearchexporter/README.md | 2 +- exporter/elasticsearchexporter/bulkindexer.go | 7 + .../elasticsearchexporter/bulkindexer_test.go | 94 +++++++++-- exporter/elasticsearchexporter/config.go | 6 +- exporter/elasticsearchexporter/config_test.go | 30 +++- exporter/elasticsearchexporter/esclient.go | 10 +- exporter/elasticsearchexporter/factory.go | 2 + exporter/elasticsearchexporter/go.mod | 2 +- .../testdata/config.yaml | 6 + exporter/elasticsearchexporter/utils_test.go | 7 +- receiver/tlscheckreceiver/README.md | 4 +- receiver/tlscheckreceiver/config.go | 45 ++++-- receiver/tlscheckreceiver/config_test.go | 37 +++-- receiver/tlscheckreceiver/documentation.md | 7 +- receiver/tlscheckreceiver/factory.go | 11 +- receiver/tlscheckreceiver/go.mod | 1 - receiver/tlscheckreceiver/go.sum | 2 - .../internal/metadata/generated_config.go | 46 +----- .../metadata/generated_config_test.go | 52 +----- .../internal/metadata/generated_metrics.go | 55 ++----- .../metadata/generated_metrics_test.go | 18 +-- .../internal/metadata/generated_resource.go | 36 ----- .../metadata/generated_resource_test.go | 40 ----- .../internal/metadata/testdata/config.yaml | 18 --- receiver/tlscheckreceiver/metadata.yaml | 11 +- receiver/tlscheckreceiver/scraper.go | 75 ++++++++- receiver/tlscheckreceiver/scraper_test.go | 152 ++++++++++++++++++ 31 files changed, 520 insertions(+), 319 deletions(-) create mode 100644 .chloggen/elasticsearchexporter_compression-gzip.yaml create mode 100644 .chloggen/tlscheckreceiver-implementation.yaml delete mode 100644 receiver/tlscheckreceiver/internal/metadata/generated_resource.go delete mode 100644 receiver/tlscheckreceiver/internal/metadata/generated_resource_test.go create mode 100644 receiver/tlscheckreceiver/scraper_test.go diff --git a/.chloggen/elasticsearchexporter_compression-gzip.yaml b/.chloggen/elasticsearchexporter_compression-gzip.yaml new file mode 100644 index 000000000000..207d8f40322a --- /dev/null +++ b/.chloggen/elasticsearchexporter_compression-gzip.yaml @@ -0,0 +1,27 @@ +# Use this changelog template to create an entry for release notes. + +# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix' +change_type: breaking + +# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver) +component: elasticsearchexporter + +# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`). +note: Enable gzip compression by default + +# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists. +issues: [35865] + +# (Optional) One or more lines of additional information to render under the primary note. +# These lines will be padded with 2 spaces and then inserted directly into the document. +# Use pipe (|) for multiline entries. +subtext: To disable compression, set config `compression` to `none`. + +# If your change doesn't affect end users or the exported elements of any package, +# you should instead start your pull request title with [chore] or use the "Skip Changelog" label. +# Optional: The change log or logs in which this entry should be included. +# e.g. 
'[user]' or '[user, api]' +# Include 'user' if the change is relevant to end users. +# Include 'api' if there is a change to a library API. +# Default: '[user]' +change_logs: [user] diff --git a/.chloggen/tlscheckreceiver-implementation.yaml b/.chloggen/tlscheckreceiver-implementation.yaml new file mode 100644 index 000000000000..d9695418d232 --- /dev/null +++ b/.chloggen/tlscheckreceiver-implementation.yaml @@ -0,0 +1,27 @@ +# Use this changelog template to create an entry for release notes. + +# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix' +change_type: enhancement + +# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver) +component: tlscheckreceiver + +# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`). +note: Implement TLS Check Receiver for host-based checks + +# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists. +issues: [35842] + +# (Optional) One or more lines of additional information to render under the primary note. +# These lines will be padded with 2 spaces and then inserted directly into the document. +# Use pipe (|) for multiline entries. +subtext: + +# If your change doesn't affect end users or the exported elements of any package, +# you should instead start your pull request title with [chore] or use the "Skip Changelog" label. +# Optional: The change log or logs in which this entry should be included. +# e.g. '[user]' or '[user, api]' +# Include 'user' if the change is relevant to end users. +# Include 'api' if there is a change to a library API. +# Default: '[user]' +change_logs: [user] diff --git a/.golangci.yml b/.golangci.yml index bf8564d6a445..e7d4a3846078 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -137,6 +137,7 @@ linters-settings: - float-compare - require-error - suite-subtest-run + - encoded-compare # has false positives that cannot be fixed with testifylint-fix enable-all: true linters: diff --git a/Makefile b/Makefile index 1e087f09fea2..86ecba3a3276 100644 --- a/Makefile +++ b/Makefile @@ -395,16 +395,16 @@ define updatehelper echo "Usage: updatehelper "; \ exit 1; \ fi - grep "go\.opentelemetry\.io" $(1) | sed 's/^\s*-\s*//' | while IFS= read -r line; do \ + grep "go\.opentelemetry\.io" $(1) | sed 's/^[[:space:]]*-[[:space:]]*//' | while IFS= read -r line; do \ if grep -qF "$$line" $(2); then \ package=$$(grep -F "$$line" $(2) | head -n 1 | awk '{print $$1}'); \ version=$$(grep -F "$$line" $(2) | head -n 1 | awk '{print $$2}'); \ builder_package=$$(grep -F "$$package" $(3) | awk '{print $$3}'); \ builder_version=$$(grep -F "$$package" $(3) | awk '{print $$4}'); \ if [ "$$builder_package" == "$$package" ]; then \ - echo "$$builder_version";\ - sed -i -e "s|$$builder_package.*$$builder_version|$$builder_package $$version|" $(3); \ - echo "[$(3)]: $$package updated to $$version"; \ + sed -i.bak -e "s|$$builder_package.*$$builder_version|$$builder_package $$version|" $(3); \ + rm $(3).bak; \ + echo "[$(3)]: $$package updated from $$builder_version to $$version"; \ fi; \ fi; \ done diff --git a/exporter/elasticsearchexporter/README.md b/exporter/elasticsearchexporter/README.md index b620b81158e9..eadb1e309803 100644 --- a/exporter/elasticsearchexporter/README.md +++ b/exporter/elasticsearchexporter/README.md @@ -71,7 +71,7 @@ service: ### HTTP settings -The Elasticsearch exporter supports common [HTTP Configuration Settings][confighttp], except for 
`compression` (all requests are uncompressed). +The Elasticsearch exporter supports common [HTTP Configuration Settings][confighttp]. Gzip compression is enabled by default. To disable compression, set `compression` to `none`. As a consequence of supporting [confighttp], the Elasticsearch exporter also supports common [TLS Configuration Settings][configtls]. The Elasticsearch exporter sets `timeout` (HTTP request timeout) to 90s by default. diff --git a/exporter/elasticsearchexporter/bulkindexer.go b/exporter/elasticsearchexporter/bulkindexer.go index 21b48814914d..1700b03619d6 100644 --- a/exporter/elasticsearchexporter/bulkindexer.go +++ b/exporter/elasticsearchexporter/bulkindexer.go @@ -4,6 +4,7 @@ package elasticsearchexporter // import "github.com/open-telemetry/opentelemetry-collector-contrib/exporter/elasticsearchexporter" import ( + "compress/gzip" "context" "errors" "io" @@ -15,6 +16,7 @@ import ( "github.com/elastic/go-docappender/v2" "github.com/elastic/go-elasticsearch/v7" + "go.opentelemetry.io/collector/config/configcompression" "go.uber.org/zap" ) @@ -68,12 +70,17 @@ func bulkIndexerConfig(client *elasticsearch.Client, config *Config) docappender maxDocRetries = config.Retry.MaxRetries } } + var compressionLevel int + if config.Compression == configcompression.TypeGzip { + compressionLevel = gzip.BestSpeed + } return docappender.BulkIndexerConfig{ Client: client, MaxDocumentRetries: maxDocRetries, Pipeline: config.Pipeline, RetryOnDocumentStatus: config.Retry.RetryOnStatus, RequireDataStream: config.MappingMode() == MappingOTel, + CompressionLevel: compressionLevel, } } diff --git a/exporter/elasticsearchexporter/bulkindexer_test.go b/exporter/elasticsearchexporter/bulkindexer_test.go index 7a75c6f5a0f1..194890e69aa6 100644 --- a/exporter/elasticsearchexporter/bulkindexer_test.go +++ b/exporter/elasticsearchexporter/bulkindexer_test.go @@ -15,6 +15,7 @@ import ( "github.com/elastic/go-elasticsearch/v7" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "go.opentelemetry.io/collector/config/confighttp" "go.uber.org/zap" "go.uber.org/zap/zapcore" "go.uber.org/zap/zaptest/observer" @@ -62,13 +63,8 @@ func TestAsyncBulkIndexer_flushOnClose(t *testing.T) { }}) require.NoError(t, err) - bulkIndexer, err := newAsyncBulkIndexer(zap.NewNop(), client, &cfg) - require.NoError(t, err) - session, err := bulkIndexer.StartSession(context.Background()) - require.NoError(t, err) + bulkIndexer := runBulkIndexerOnce(t, &cfg, client) - assert.NoError(t, session.Add(context.Background(), "foo", strings.NewReader(`{"foo": "bar"}`), nil)) - assert.NoError(t, bulkIndexer.Close(context.Background())) assert.Equal(t, int64(1), bulkIndexer.stats.docsIndexed.Load()) } @@ -157,13 +153,7 @@ func TestAsyncBulkIndexer_requireDataStream(t *testing.T) { }}) require.NoError(t, err) - bulkIndexer, err := newAsyncBulkIndexer(zap.NewNop(), client, &tt.config) - require.NoError(t, err) - session, err := bulkIndexer.StartSession(context.Background()) - require.NoError(t, err) - - assert.NoError(t, session.Add(context.Background(), "foo", strings.NewReader(`{"foo": "bar"}`), nil)) - assert.NoError(t, bulkIndexer.Close(context.Background())) + runBulkIndexerOnce(t, &tt.config, client) assert.Equal(t, tt.wantRequireDataStream, <-requireDataStreamCh) }) @@ -234,6 +224,8 @@ func TestAsyncBulkIndexer_flush_error(t *testing.T) { bulkIndexer, err := newAsyncBulkIndexer(zap.New(core), client, &cfg) require.NoError(t, err) + defer bulkIndexer.Close(context.Background()) + session, err := 
bulkIndexer.StartSession(context.Background()) require.NoError(t, err) @@ -241,7 +233,6 @@ func TestAsyncBulkIndexer_flush_error(t *testing.T) { // should flush time.Sleep(100 * time.Millisecond) assert.Equal(t, int64(0), bulkIndexer.stats.docsIndexed.Load()) - assert.NoError(t, bulkIndexer.Close(context.Background())) messages := observed.FilterMessage(tt.wantMessage) require.Equal(t, 1, messages.Len(), "message not found; observed.All()=%v", observed.All()) for _, wantField := range tt.wantFields { @@ -250,3 +241,78 @@ func TestAsyncBulkIndexer_flush_error(t *testing.T) { }) } } + +func TestAsyncBulkIndexer_logRoundTrip(t *testing.T) { + tests := []struct { + name string + config Config + }{ + { + name: "compression none", + config: Config{ + NumWorkers: 1, + ClientConfig: confighttp.ClientConfig{Compression: "none"}, + }, + }, + { + name: "compression gzip", + config: Config{ + NumWorkers: 1, + ClientConfig: confighttp.ClientConfig{Compression: "gzip"}, + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + loggerCore, logObserver := observer.New(zap.DebugLevel) + + esLogger := clientLogger{ + Logger: zap.New(loggerCore), + logRequestBody: true, + logResponseBody: true, + } + + client, err := elasticsearch.NewClient(elasticsearch.Config{ + Transport: &mockTransport{ + RoundTripFunc: func(*http.Request) (*http.Response, error) { + return &http.Response{ + Header: http.Header{"X-Elastic-Product": []string{"Elasticsearch"}}, + Body: io.NopCloser(strings.NewReader(successResp)), + }, nil + }, + }, + Logger: &esLogger, + }) + require.NoError(t, err) + + runBulkIndexerOnce(t, &tt.config, client) + + records := logObserver.AllUntimed() + assert.Len(t, records, 2) + + assert.Equal(t, "/", records[0].ContextMap()["path"]) + assert.Nil(t, records[0].ContextMap()["request_body"]) + assert.JSONEq(t, successResp, records[0].ContextMap()["response_body"].(string)) + + assert.Equal(t, "/_bulk", records[1].ContextMap()["path"]) + assert.Equal(t, "{\"create\":{\"_index\":\"foo\"}}\n{\"foo\": \"bar\"}\n", records[1].ContextMap()["request_body"]) + assert.JSONEq(t, successResp, records[1].ContextMap()["response_body"].(string)) + }) + } +} + +func runBulkIndexerOnce(t *testing.T, config *Config, client *elasticsearch.Client) *asyncBulkIndexer { + bulkIndexer, err := newAsyncBulkIndexer(zap.NewNop(), client, config) + require.NoError(t, err) + session, err := bulkIndexer.StartSession(context.Background()) + require.NoError(t, err) + + assert.NoError(t, session.Add(context.Background(), "foo", strings.NewReader(`{"foo": "bar"}`), nil)) + assert.NoError(t, bulkIndexer.Close(context.Background())) + + return bulkIndexer +} diff --git a/exporter/elasticsearchexporter/config.go b/exporter/elasticsearchexporter/config.go index fe794d6db430..0835396d928f 100644 --- a/exporter/elasticsearchexporter/config.go +++ b/exporter/elasticsearchexporter/config.go @@ -12,6 +12,7 @@ import ( "strings" "time" + "go.opentelemetry.io/collector/config/configcompression" "go.opentelemetry.io/collector/config/confighttp" "go.opentelemetry.io/collector/config/configopaque" "go.opentelemetry.io/collector/exporter/exporterbatcher" @@ -273,9 +274,8 @@ func (cfg *Config) Validate() error { return fmt.Errorf("unknown mapping mode %q", cfg.Mapping.Mode) } - if cfg.Compression != "" { - // TODO support confighttp.ClientConfig.Compression - return errors.New("compression is not currently configurable") + if cfg.Compression != "none" && cfg.Compression != configcompression.TypeGzip { 
+ return errors.New("compression must be one of [none, gzip]") } if cfg.Retry.MaxRequests != 0 && cfg.Retry.MaxRetries != 0 { diff --git a/exporter/elasticsearchexporter/config_test.go b/exporter/elasticsearchexporter/config_test.go index 9934dbb7365b..baec2bd9646a 100644 --- a/exporter/elasticsearchexporter/config_test.go +++ b/exporter/elasticsearchexporter/config_test.go @@ -6,6 +6,7 @@ package elasticsearchexporter import ( "net/http" "path/filepath" + "strings" "testing" "time" @@ -38,6 +39,7 @@ func TestConfig(t *testing.T) { defaultMaxIdleConns := 100 defaultIdleConnTimeout := 90 * time.Second + defaultCompression := configcompression.TypeGzip tests := []struct { configFile string @@ -80,6 +82,7 @@ func TestConfig(t *testing.T) { cfg.Headers = map[string]configopaque.String{ "myheader": "test", } + cfg.Compression = defaultCompression }), Authentication: AuthenticationSettings{ User: "elastic", @@ -150,6 +153,7 @@ func TestConfig(t *testing.T) { cfg.Headers = map[string]configopaque.String{ "myheader": "test", } + cfg.Compression = defaultCompression }), Authentication: AuthenticationSettings{ User: "elastic", @@ -220,6 +224,7 @@ func TestConfig(t *testing.T) { cfg.Headers = map[string]configopaque.String{ "myheader": "test", } + cfg.Compression = defaultCompression }), Authentication: AuthenticationSettings{ User: "elastic", @@ -301,10 +306,29 @@ func TestConfig(t *testing.T) { cfg.Batcher.Enabled = &enabled }), }, + { + id: component.NewIDWithName(metadata.Type, "compression_none"), + configFile: "config.yaml", + expected: withDefaultConfig(func(cfg *Config) { + cfg.Endpoint = "https://elastic.example.com:9200" + + cfg.Compression = "none" + }), + }, + { + id: component.NewIDWithName(metadata.Type, "compression_gzip"), + configFile: "config.yaml", + expected: withDefaultConfig(func(cfg *Config) { + cfg.Endpoint = "https://elastic.example.com:9200" + + cfg.Compression = "gzip" + }), + }, } for _, tt := range tests { - t.Run(tt.id.String(), func(t *testing.T) { + tt := tt + t.Run(strings.ReplaceAll(tt.id.String(), "/", "_"), func(t *testing.T) { factory := NewFactory() cfg := factory.CreateDefaultConfig() @@ -387,9 +411,9 @@ func TestConfig_Validate(t *testing.T) { "compression unsupported": { config: withDefaultConfig(func(cfg *Config) { cfg.Endpoints = []string{"http://test:9200"} - cfg.Compression = configcompression.TypeGzip + cfg.Compression = configcompression.TypeSnappy }), - err: `compression is not currently configurable`, + err: `compression must be one of [none, gzip]`, }, "both max_retries and max_requests specified": { config: withDefaultConfig(func(cfg *Config) { diff --git a/exporter/elasticsearchexporter/esclient.go b/exporter/elasticsearchexporter/esclient.go index 556718242bbf..927f62844ec3 100644 --- a/exporter/elasticsearchexporter/esclient.go +++ b/exporter/elasticsearchexporter/esclient.go @@ -11,6 +11,7 @@ import ( "github.com/cenkalti/backoff/v4" "github.com/elastic/go-elasticsearch/v7" + "github.com/klauspost/compress/gzip" "go.opentelemetry.io/collector/component" "go.uber.org/zap" @@ -32,7 +33,14 @@ func (cl *clientLogger) LogRoundTrip(requ *http.Request, resp *http.Response, cl var fields []zap.Field if cl.logRequestBody && requ != nil && requ.Body != nil { - if b, err := io.ReadAll(requ.Body); err == nil { + body := requ.Body + if requ.Header.Get("Content-Encoding") == "gzip" { + if r, err := gzip.NewReader(body); err == nil { + defer r.Close() + body = r + } + } + if b, err := io.ReadAll(body); err == nil { fields = append(fields, 
zap.ByteString("request_body", b)) } } diff --git a/exporter/elasticsearchexporter/factory.go b/exporter/elasticsearchexporter/factory.go index 61af38d5cee6..31f9a17895f7 100644 --- a/exporter/elasticsearchexporter/factory.go +++ b/exporter/elasticsearchexporter/factory.go @@ -11,6 +11,7 @@ import ( "time" "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/config/configcompression" "go.opentelemetry.io/collector/config/confighttp" "go.opentelemetry.io/collector/consumer" "go.opentelemetry.io/collector/exporter" @@ -44,6 +45,7 @@ func createDefaultConfig() component.Config { httpClientConfig := confighttp.NewDefaultClientConfig() httpClientConfig.Timeout = 90 * time.Second + httpClientConfig.Compression = configcompression.TypeGzip return &Config{ QueueSettings: qs, diff --git a/exporter/elasticsearchexporter/go.mod b/exporter/elasticsearchexporter/go.mod index c9a675710622..470e56ec3e4d 100644 --- a/exporter/elasticsearchexporter/go.mod +++ b/exporter/elasticsearchexporter/go.mod @@ -8,6 +8,7 @@ require ( github.com/elastic/go-elasticsearch/v7 v7.17.10 github.com/elastic/go-structform v0.0.12 github.com/json-iterator/go v1.1.12 + github.com/klauspost/compress v1.17.10 github.com/lestrrat-go/strftime v1.1.0 github.com/open-telemetry/opentelemetry-collector-contrib/internal/common v0.111.0 github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.111.0 @@ -44,7 +45,6 @@ require ( github.com/golang/snappy v0.0.4 // indirect github.com/google/uuid v1.6.0 // indirect github.com/joeshaw/multierror v0.0.0-20140124173710-69b34d4ec901 // indirect - github.com/klauspost/compress v1.17.10 // indirect github.com/knadh/koanf/maps v0.1.1 // indirect github.com/knadh/koanf/providers/confmap v0.1.0 // indirect github.com/knadh/koanf/v2 v2.1.1 // indirect diff --git a/exporter/elasticsearchexporter/testdata/config.yaml b/exporter/elasticsearchexporter/testdata/config.yaml index d76d300a51c1..e3f7ffc67fa9 100644 --- a/exporter/elasticsearchexporter/testdata/config.yaml +++ b/exporter/elasticsearchexporter/testdata/config.yaml @@ -86,3 +86,9 @@ elasticsearch/batcher_disabled: endpoint: https://elastic.example.com:9200 batcher: enabled: false +elasticsearch/compression_none: + endpoint: https://elastic.example.com:9200 + compression: none +elasticsearch/compression_gzip: + endpoint: https://elastic.example.com:9200 + compression: gzip diff --git a/exporter/elasticsearchexporter/utils_test.go b/exporter/elasticsearchexporter/utils_test.go index 05e9d8576a57..94c475219ffb 100644 --- a/exporter/elasticsearchexporter/utils_test.go +++ b/exporter/elasticsearchexporter/utils_test.go @@ -16,6 +16,7 @@ import ( "testing" "time" + "github.com/klauspost/compress/gzip" "github.com/stretchr/testify/assert" "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/plog" @@ -160,7 +161,11 @@ func newESTestServer(t *testing.T, bulkHandler bulkHandler) *httptest.Server { tsStart := time.Now() var items []itemRequest - dec := json.NewDecoder(req.Body) + body := req.Body + if req.Header.Get("Content-Encoding") == "gzip" { + body, _ = gzip.NewReader(req.Body) + } + dec := json.NewDecoder(body) for dec.More() { var action, doc json.RawMessage if err := dec.Decode(&action); err != nil { diff --git a/receiver/tlscheckreceiver/README.md b/receiver/tlscheckreceiver/README.md index 1a5a89bf0317..a5b617719c95 100644 --- a/receiver/tlscheckreceiver/README.md +++ b/receiver/tlscheckreceiver/README.md @@ -24,8 +24,8 @@ By default, the TLS Check Receiver will emit a 
single metric, `tlscheck.time_lef receivers: tlscheck: targets: - - url: https://example.com - - url: https://foobar.com:8080 + - host: example.com:443 + - host: foobar.com:8080 ``` ## Certificate Verification diff --git a/receiver/tlscheckreceiver/config.go b/receiver/tlscheckreceiver/config.go index d20c7b2e091d..02646081b35b 100644 --- a/receiver/tlscheckreceiver/config.go +++ b/receiver/tlscheckreceiver/config.go @@ -6,7 +6,9 @@ package tlscheckreceiver // import "github.com/open-telemetry/opentelemetry-coll import ( "errors" "fmt" - "net/url" + "net" + "strconv" + "strings" "go.opentelemetry.io/collector/receiver/scraperhelper" "go.uber.org/multierr" @@ -16,8 +18,7 @@ import ( // Predefined error responses for configuration validation failures var ( - errMissingURL = errors.New(`"url" must be specified`) - errInvalidURL = errors.New(`"url" must be in the form of ://[:]`) + errInvalidHost = errors.New(`"host" must be in the form of :`) ) // Config defines the configuration for the various elements of the receiver agent. @@ -28,20 +29,40 @@ type Config struct { } type targetConfig struct { - URL string `mapstructure:"url"` + Host string `mapstructure:"host"` +} + +func validatePort(port string) error { + portNum, err := strconv.Atoi(port) + if err != nil { + return fmt.Errorf("provided port is not a number: %s", port) + } + if portNum < 1 || portNum > 65535 { + return fmt.Errorf("provided port is out of valid range (1-65535): %d", portNum) + } + return nil } // Validate validates the configuration by checking for missing or invalid fields func (cfg *targetConfig) Validate() error { var err error - if cfg.URL == "" { - err = multierr.Append(err, errMissingURL) - } else { - _, parseErr := url.ParseRequestURI(cfg.URL) - if parseErr != nil { - err = multierr.Append(err, fmt.Errorf("%s: %w", errInvalidURL.Error(), parseErr)) - } + if cfg.Host == "" { + return ErrMissingTargets + } + + if strings.Contains(cfg.Host, "://") { + return fmt.Errorf("host contains a scheme, which is not allowed: %s", cfg.Host) + } + + _, port, parseErr := net.SplitHostPort(cfg.Host) + if parseErr != nil { + return fmt.Errorf("%s: %w", errInvalidHost.Error(), parseErr) + } + + portParseErr := validatePort(port) + if portParseErr != nil { + return fmt.Errorf("%s: %w", errInvalidHost.Error(), portParseErr) } return err @@ -52,7 +73,7 @@ func (cfg *Config) Validate() error { var err error if len(cfg.Targets) == 0 { - err = multierr.Append(err, errMissingURL) + err = multierr.Append(err, ErrMissingTargets) } for _, target := range cfg.Targets { diff --git a/receiver/tlscheckreceiver/config_test.go b/receiver/tlscheckreceiver/config_test.go index 54e1748352c9..e3fa89c0d877 100644 --- a/receiver/tlscheckreceiver/config_test.go +++ b/receiver/tlscheckreceiver/config_test.go @@ -23,56 +23,71 @@ func TestValidate(t *testing.T) { Targets: []*targetConfig{}, ControllerConfig: scraperhelper.NewDefaultControllerConfig(), }, - expectedErr: errMissingURL, + expectedErr: ErrMissingTargets, }, { - desc: "invalid url", + desc: "invalid host", cfg: &Config{ Targets: []*targetConfig{ { - URL: "invalid://endpoint: 12efg", + Host: "endpoint: 12efg", }, }, ControllerConfig: scraperhelper.NewDefaultControllerConfig(), }, - expectedErr: fmt.Errorf("%w: %s", errInvalidURL, `parse "invalid://endpoint: 12efg": invalid port ": 12efg" after host`), + expectedErr: fmt.Errorf("%w: %s", errInvalidHost, "provided port is not a number: 12efg"), }, { desc: "invalid config with multiple targets", cfg: &Config{ Targets: []*targetConfig{ { - URL: 
"invalid://endpoint: 12efg", + Host: "endpoint: 12efg", }, { - URL: "https://example.com", + Host: "https://example.com:80", }, }, ControllerConfig: scraperhelper.NewDefaultControllerConfig(), }, - expectedErr: fmt.Errorf("%w: %s", errInvalidURL, `parse "invalid://endpoint: 12efg": invalid port ": 12efg" after host`), + expectedErr: fmt.Errorf("%w: %s", errInvalidHost, `provided port is not a number: 12efg; host contains a scheme, which is not allowed: https://example.com:80`), + }, + { + desc: "port out of range", + cfg: &Config{ + Targets: []*targetConfig{ + { + Host: "www.opentelemetry.io:67000", + }, + }, + ControllerConfig: scraperhelper.NewDefaultControllerConfig(), + }, + expectedErr: fmt.Errorf("%w: %s", errInvalidHost, `provided port is out of valid range (1-65535): 67000`), }, { desc: "missing scheme", cfg: &Config{ Targets: []*targetConfig{ { - URL: "www.opentelemetry.io/docs", + Host: "www.opentelemetry.io/docs", }, }, ControllerConfig: scraperhelper.NewDefaultControllerConfig(), }, - expectedErr: fmt.Errorf("%w: %s", errInvalidURL, `parse "www.opentelemetry.io/docs": invalid URI for request`), + expectedErr: fmt.Errorf("%w: %s", errInvalidHost, `address www.opentelemetry.io/docs: missing port in address`), }, { desc: "valid config", cfg: &Config{ Targets: []*targetConfig{ { - URL: "https://opentelemetry.io", + Host: "opentelemetry.io:443", + }, + { + Host: "opentelemetry.io:8080", }, { - URL: "https://opentelemetry.io:80/docs", + Host: "111.222.33.44:10000", }, }, ControllerConfig: scraperhelper.NewDefaultControllerConfig(), diff --git a/receiver/tlscheckreceiver/documentation.md b/receiver/tlscheckreceiver/documentation.md index 1b0d5e7e5cf6..cccf198f3681 100644 --- a/receiver/tlscheckreceiver/documentation.md +++ b/receiver/tlscheckreceiver/documentation.md @@ -26,9 +26,4 @@ Time in seconds until certificate expiry, as specified by `NotAfter` field in th | ---- | ----------- | ------ | | tlscheck.x509.issuer | The entity that issued the certificate. | Any Str | | tlscheck.x509.cn | The commonName in the subject of the certificate. | Any Str | - -## Resource Attributes - -| Name | Description | Values | Enabled | -| ---- | ----------- | ------ | ------- | -| tlscheck.url | Url at which the certificate was accessed. | Any Str | true | +| tlscheck.host | Host at which the certificate was accessed. 
| Any Str | diff --git a/receiver/tlscheckreceiver/factory.go b/receiver/tlscheckreceiver/factory.go index bc99c145abf7..76fb5aec3714 100644 --- a/receiver/tlscheckreceiver/factory.go +++ b/receiver/tlscheckreceiver/factory.go @@ -6,6 +6,10 @@ package tlscheckreceiver // import "github.com/open-telemetry/opentelemetry-coll import ( "context" "errors" + "time" + + // "crypto/tls" + // "net" "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/consumer" @@ -28,8 +32,11 @@ func NewFactory() receiver.Factory { } func newDefaultConfig() component.Config { + cfg := scraperhelper.NewDefaultControllerConfig() + cfg.CollectionInterval = 60 * time.Second + return &Config{ - ControllerConfig: scraperhelper.NewDefaultControllerConfig(), + ControllerConfig: cfg, MetricsBuilderConfig: metadata.DefaultMetricsBuilderConfig(), Targets: []*targetConfig{}, } @@ -46,7 +53,7 @@ func newReceiver( return nil, errConfigNotTLSCheck } - mp := newScraper(tlsCheckConfig, settings) + mp := newScraper(tlsCheckConfig, settings, getConnectionState) s, err := scraperhelper.NewScraper(metadata.Type, mp.scrape) if err != nil { return nil, err diff --git a/receiver/tlscheckreceiver/go.mod b/receiver/tlscheckreceiver/go.mod index 30e7601ee72a..e9b9accfb1f9 100644 --- a/receiver/tlscheckreceiver/go.mod +++ b/receiver/tlscheckreceiver/go.mod @@ -9,7 +9,6 @@ require ( go.opentelemetry.io/collector/confmap v1.17.1-0.20241008154146-ea48c09c31ae go.opentelemetry.io/collector/consumer v0.111.1-0.20241008154146-ea48c09c31ae go.opentelemetry.io/collector/consumer/consumertest v0.111.1-0.20241008154146-ea48c09c31ae - go.opentelemetry.io/collector/filter v0.111.1-0.20241008154146-ea48c09c31ae go.opentelemetry.io/collector/pdata v1.17.1-0.20241008154146-ea48c09c31ae go.opentelemetry.io/collector/receiver v0.111.1-0.20241008154146-ea48c09c31ae go.uber.org/goleak v1.3.0 diff --git a/receiver/tlscheckreceiver/go.sum b/receiver/tlscheckreceiver/go.sum index 968b2bb483d2..b2d1f7ab2c0f 100644 --- a/receiver/tlscheckreceiver/go.sum +++ b/receiver/tlscheckreceiver/go.sum @@ -60,8 +60,6 @@ go.opentelemetry.io/collector/consumer/consumerprofiles v0.111.1-0.2024100815414 go.opentelemetry.io/collector/consumer/consumerprofiles v0.111.1-0.20241008154146-ea48c09c31ae/go.mod h1:GK0QMMiRBWl4IhIF/7ZKgzBlR9SdRSpRlqyNInN4ZoU= go.opentelemetry.io/collector/consumer/consumertest v0.111.1-0.20241008154146-ea48c09c31ae h1:HFj6D19fJYm3KV8QidQmMApmLjzoNkzh8El5OkTGySo= go.opentelemetry.io/collector/consumer/consumertest v0.111.1-0.20241008154146-ea48c09c31ae/go.mod h1:UDZRrSgaFAwWO6I34fj0KjabVAuBCAnmizsleyIe3I4= -go.opentelemetry.io/collector/filter v0.111.1-0.20241008154146-ea48c09c31ae h1:fLRV9bU33PJWQ/eZCwzfKdV0I9ljhP84Zoq9+tBhcLU= -go.opentelemetry.io/collector/filter v0.111.1-0.20241008154146-ea48c09c31ae/go.mod h1:74Acew42eexKiuLu3tVehyMK4b5XJPWXoJyNjK2FM+U= go.opentelemetry.io/collector/internal/globalsignal v0.111.0 h1:oq0nSD+7K2Q1Fx5d3s6lPRdKZeTL0FEg4sIaR7ZJzIc= go.opentelemetry.io/collector/internal/globalsignal v0.111.0/go.mod h1:GqMXodPWOxK5uqpX8MaMXC2389y2XJTa5nPwf8FYDK8= go.opentelemetry.io/collector/pdata v1.17.1-0.20241008154146-ea48c09c31ae h1:PcwZe1RD8tC4SZExhf0f5HqK+ZuWGsowHaBBU4PiUv0= diff --git a/receiver/tlscheckreceiver/internal/metadata/generated_config.go b/receiver/tlscheckreceiver/internal/metadata/generated_config.go index 96e738301b15..a3a498b525c1 100644 --- a/receiver/tlscheckreceiver/internal/metadata/generated_config.go +++ b/receiver/tlscheckreceiver/internal/metadata/generated_config.go @@ -4,7 +4,6 @@ 
package metadata import ( "go.opentelemetry.io/collector/confmap" - "go.opentelemetry.io/collector/filter" ) // MetricConfig provides common config for a particular metric. @@ -39,54 +38,13 @@ func DefaultMetricsConfig() MetricsConfig { } } -// ResourceAttributeConfig provides common config for a particular resource attribute. -type ResourceAttributeConfig struct { - Enabled bool `mapstructure:"enabled"` - // Experimental: MetricsInclude defines a list of filters for attribute values. - // If the list is not empty, only metrics with matching resource attribute values will be emitted. - MetricsInclude []filter.Config `mapstructure:"metrics_include"` - // Experimental: MetricsExclude defines a list of filters for attribute values. - // If the list is not empty, metrics with matching resource attribute values will not be emitted. - // MetricsInclude has higher priority than MetricsExclude. - MetricsExclude []filter.Config `mapstructure:"metrics_exclude"` - - enabledSetByUser bool -} - -func (rac *ResourceAttributeConfig) Unmarshal(parser *confmap.Conf) error { - if parser == nil { - return nil - } - err := parser.Unmarshal(rac) - if err != nil { - return err - } - rac.enabledSetByUser = parser.IsSet("enabled") - return nil -} - -// ResourceAttributesConfig provides config for tlscheck resource attributes. -type ResourceAttributesConfig struct { - TlscheckURL ResourceAttributeConfig `mapstructure:"tlscheck.url"` -} - -func DefaultResourceAttributesConfig() ResourceAttributesConfig { - return ResourceAttributesConfig{ - TlscheckURL: ResourceAttributeConfig{ - Enabled: true, - }, - } -} - // MetricsBuilderConfig is a configuration for tlscheck metrics builder. type MetricsBuilderConfig struct { - Metrics MetricsConfig `mapstructure:"metrics"` - ResourceAttributes ResourceAttributesConfig `mapstructure:"resource_attributes"` + Metrics MetricsConfig `mapstructure:"metrics"` } func DefaultMetricsBuilderConfig() MetricsBuilderConfig { return MetricsBuilderConfig{ - Metrics: DefaultMetricsConfig(), - ResourceAttributes: DefaultResourceAttributesConfig(), + Metrics: DefaultMetricsConfig(), } } diff --git a/receiver/tlscheckreceiver/internal/metadata/generated_config_test.go b/receiver/tlscheckreceiver/internal/metadata/generated_config_test.go index 7c84015ab981..fa7e3f38c50d 100644 --- a/receiver/tlscheckreceiver/internal/metadata/generated_config_test.go +++ b/receiver/tlscheckreceiver/internal/metadata/generated_config_test.go @@ -27,9 +27,6 @@ func TestMetricsBuilderConfig(t *testing.T) { Metrics: MetricsConfig{ TlscheckTimeLeft: MetricConfig{Enabled: true}, }, - ResourceAttributes: ResourceAttributesConfig{ - TlscheckURL: ResourceAttributeConfig{Enabled: true}, - }, }, }, { @@ -38,16 +35,13 @@ func TestMetricsBuilderConfig(t *testing.T) { Metrics: MetricsConfig{ TlscheckTimeLeft: MetricConfig{Enabled: false}, }, - ResourceAttributes: ResourceAttributesConfig{ - TlscheckURL: ResourceAttributeConfig{Enabled: false}, - }, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { cfg := loadMetricsBuilderConfig(t, tt.name) - if diff := cmp.Diff(tt.want, cfg, cmpopts.IgnoreUnexported(MetricConfig{}, ResourceAttributeConfig{})); diff != "" { + if diff := cmp.Diff(tt.want, cfg, cmpopts.IgnoreUnexported(MetricConfig{})); diff != "" { t.Errorf("Config mismatch (-expected +actual):\n%s", diff) } }) @@ -63,47 +57,3 @@ func loadMetricsBuilderConfig(t *testing.T, name string) MetricsBuilderConfig { require.NoError(t, sub.Unmarshal(&cfg)) return cfg } - -func TestResourceAttributesConfig(t 
*testing.T) { - tests := []struct { - name string - want ResourceAttributesConfig - }{ - { - name: "default", - want: DefaultResourceAttributesConfig(), - }, - { - name: "all_set", - want: ResourceAttributesConfig{ - TlscheckURL: ResourceAttributeConfig{Enabled: true}, - }, - }, - { - name: "none_set", - want: ResourceAttributesConfig{ - TlscheckURL: ResourceAttributeConfig{Enabled: false}, - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - cfg := loadResourceAttributesConfig(t, tt.name) - if diff := cmp.Diff(tt.want, cfg, cmpopts.IgnoreUnexported(ResourceAttributeConfig{})); diff != "" { - t.Errorf("Config mismatch (-expected +actual):\n%s", diff) - } - }) - } -} - -func loadResourceAttributesConfig(t *testing.T, name string) ResourceAttributesConfig { - cm, err := confmaptest.LoadConf(filepath.Join("testdata", "config.yaml")) - require.NoError(t, err) - sub, err := cm.Sub(name) - require.NoError(t, err) - sub, err = sub.Sub("resource_attributes") - require.NoError(t, err) - cfg := DefaultResourceAttributesConfig() - require.NoError(t, sub.Unmarshal(&cfg)) - return cfg -} diff --git a/receiver/tlscheckreceiver/internal/metadata/generated_metrics.go b/receiver/tlscheckreceiver/internal/metadata/generated_metrics.go index 988cf4f1cdcb..7aa27a53bfaf 100644 --- a/receiver/tlscheckreceiver/internal/metadata/generated_metrics.go +++ b/receiver/tlscheckreceiver/internal/metadata/generated_metrics.go @@ -6,7 +6,6 @@ import ( "time" "go.opentelemetry.io/collector/component" - "go.opentelemetry.io/collector/filter" "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/pmetric" "go.opentelemetry.io/collector/receiver" @@ -27,7 +26,7 @@ func (m *metricTlscheckTimeLeft) init() { m.data.Gauge().DataPoints().EnsureCapacity(m.capacity) } -func (m *metricTlscheckTimeLeft) recordDataPoint(start pcommon.Timestamp, ts pcommon.Timestamp, val int64, tlscheckX509IssuerAttributeValue string, tlscheckX509CnAttributeValue string) { +func (m *metricTlscheckTimeLeft) recordDataPoint(start pcommon.Timestamp, ts pcommon.Timestamp, val int64, tlscheckX509IssuerAttributeValue string, tlscheckX509CnAttributeValue string, tlscheckHostAttributeValue string) { if !m.config.Enabled { return } @@ -37,6 +36,7 @@ func (m *metricTlscheckTimeLeft) recordDataPoint(start pcommon.Timestamp, ts pco dp.SetIntValue(val) dp.Attributes().PutStr("tlscheck.x509.issuer", tlscheckX509IssuerAttributeValue) dp.Attributes().PutStr("tlscheck.x509.cn", tlscheckX509CnAttributeValue) + dp.Attributes().PutStr("tlscheck.host", tlscheckHostAttributeValue) } // updateCapacity saves max length of data point slices that will be used for the slice capacity. @@ -67,14 +67,12 @@ func newMetricTlscheckTimeLeft(cfg MetricConfig) metricTlscheckTimeLeft { // MetricsBuilder provides an interface for scrapers to report metrics while taking care of all the transformations // required to produce metric representation defined in metadata and user config. type MetricsBuilder struct { - config MetricsBuilderConfig // config of the metrics builder. - startTime pcommon.Timestamp // start time that will be applied to all recorded data points. - metricsCapacity int // maximum observed number of metrics per resource. - metricsBuffer pmetric.Metrics // accumulates metrics data before emitting. - buildInfo component.BuildInfo // contains version information. 
- resourceAttributeIncludeFilter map[string]filter.Filter - resourceAttributeExcludeFilter map[string]filter.Filter - metricTlscheckTimeLeft metricTlscheckTimeLeft + config MetricsBuilderConfig // config of the metrics builder. + startTime pcommon.Timestamp // start time that will be applied to all recorded data points. + metricsCapacity int // maximum observed number of metrics per resource. + metricsBuffer pmetric.Metrics // accumulates metrics data before emitting. + buildInfo component.BuildInfo // contains version information. + metricTlscheckTimeLeft metricTlscheckTimeLeft } // MetricBuilderOption applies changes to default metrics builder. @@ -97,19 +95,11 @@ func WithStartTime(startTime pcommon.Timestamp) MetricBuilderOption { func NewMetricsBuilder(mbc MetricsBuilderConfig, settings receiver.Settings, options ...MetricBuilderOption) *MetricsBuilder { mb := &MetricsBuilder{ - config: mbc, - startTime: pcommon.NewTimestampFromTime(time.Now()), - metricsBuffer: pmetric.NewMetrics(), - buildInfo: settings.BuildInfo, - metricTlscheckTimeLeft: newMetricTlscheckTimeLeft(mbc.Metrics.TlscheckTimeLeft), - resourceAttributeIncludeFilter: make(map[string]filter.Filter), - resourceAttributeExcludeFilter: make(map[string]filter.Filter), - } - if mbc.ResourceAttributes.TlscheckURL.MetricsInclude != nil { - mb.resourceAttributeIncludeFilter["tlscheck.url"] = filter.CreateFilter(mbc.ResourceAttributes.TlscheckURL.MetricsInclude) - } - if mbc.ResourceAttributes.TlscheckURL.MetricsExclude != nil { - mb.resourceAttributeExcludeFilter["tlscheck.url"] = filter.CreateFilter(mbc.ResourceAttributes.TlscheckURL.MetricsExclude) + config: mbc, + startTime: pcommon.NewTimestampFromTime(time.Now()), + metricsBuffer: pmetric.NewMetrics(), + buildInfo: settings.BuildInfo, + metricTlscheckTimeLeft: newMetricTlscheckTimeLeft(mbc.Metrics.TlscheckTimeLeft), } for _, op := range options { @@ -118,11 +108,6 @@ func NewMetricsBuilder(mbc MetricsBuilderConfig, settings receiver.Settings, opt return mb } -// NewResourceBuilder returns a new resource builder that should be used to build a resource associated with for the emitted metrics. -func (mb *MetricsBuilder) NewResourceBuilder() *ResourceBuilder { - return NewResourceBuilder(mb.config.ResourceAttributes) -} - // updateCapacity updates max length of metrics and resource attributes that will be used for the slice capacity. func (mb *MetricsBuilder) updateCapacity(rm pmetric.ResourceMetrics) { if mb.metricsCapacity < rm.ScopeMetrics().At(0).Metrics().Len() { @@ -185,16 +170,6 @@ func (mb *MetricsBuilder) EmitForResource(options ...ResourceMetricsOption) { for _, op := range options { op.apply(rm) } - for attr, filter := range mb.resourceAttributeIncludeFilter { - if val, ok := rm.Resource().Attributes().Get(attr); ok && !filter.Matches(val.AsString()) { - return - } - } - for attr, filter := range mb.resourceAttributeExcludeFilter { - if val, ok := rm.Resource().Attributes().Get(attr); ok && filter.Matches(val.AsString()) { - return - } - } if ils.Metrics().Len() > 0 { mb.updateCapacity(rm) @@ -213,8 +188,8 @@ func (mb *MetricsBuilder) Emit(options ...ResourceMetricsOption) pmetric.Metrics } // RecordTlscheckTimeLeftDataPoint adds a data point to tlscheck.time_left metric. 
-func (mb *MetricsBuilder) RecordTlscheckTimeLeftDataPoint(ts pcommon.Timestamp, val int64, tlscheckX509IssuerAttributeValue string, tlscheckX509CnAttributeValue string) { - mb.metricTlscheckTimeLeft.recordDataPoint(mb.startTime, ts, val, tlscheckX509IssuerAttributeValue, tlscheckX509CnAttributeValue) +func (mb *MetricsBuilder) RecordTlscheckTimeLeftDataPoint(ts pcommon.Timestamp, val int64, tlscheckX509IssuerAttributeValue string, tlscheckX509CnAttributeValue string, tlscheckHostAttributeValue string) { + mb.metricTlscheckTimeLeft.recordDataPoint(mb.startTime, ts, val, tlscheckX509IssuerAttributeValue, tlscheckX509CnAttributeValue, tlscheckHostAttributeValue) } // Reset resets metrics builder to its initial state. It should be used when external metrics source is restarted, diff --git a/receiver/tlscheckreceiver/internal/metadata/generated_metrics_test.go b/receiver/tlscheckreceiver/internal/metadata/generated_metrics_test.go index b81713f24b82..5c37211d987f 100644 --- a/receiver/tlscheckreceiver/internal/metadata/generated_metrics_test.go +++ b/receiver/tlscheckreceiver/internal/metadata/generated_metrics_test.go @@ -42,15 +42,6 @@ func TestMetricsBuilder(t *testing.T) { resAttrsSet: testDataSetNone, expectEmpty: true, }, - { - name: "filter_set_include", - resAttrsSet: testDataSetAll, - }, - { - name: "filter_set_exclude", - resAttrsSet: testDataSetAll, - expectEmpty: true, - }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { @@ -70,11 +61,9 @@ func TestMetricsBuilder(t *testing.T) { defaultMetricsCount++ allMetricsCount++ - mb.RecordTlscheckTimeLeftDataPoint(ts, 1, "tlscheck.x509.issuer-val", "tlscheck.x509.cn-val") + mb.RecordTlscheckTimeLeftDataPoint(ts, 1, "tlscheck.x509.issuer-val", "tlscheck.x509.cn-val", "tlscheck.host-val") - rb := mb.NewResourceBuilder() - rb.SetTlscheckURL("tlscheck.url-val") - res := rb.Emit() + res := pcommon.NewResource() metrics := mb.Emit(WithResource(res)) if tt.expectEmpty { @@ -114,6 +103,9 @@ func TestMetricsBuilder(t *testing.T) { attrVal, ok = dp.Attributes().Get("tlscheck.x509.cn") assert.True(t, ok) assert.EqualValues(t, "tlscheck.x509.cn-val", attrVal.Str()) + attrVal, ok = dp.Attributes().Get("tlscheck.host") + assert.True(t, ok) + assert.EqualValues(t, "tlscheck.host-val", attrVal.Str()) } } }) diff --git a/receiver/tlscheckreceiver/internal/metadata/generated_resource.go b/receiver/tlscheckreceiver/internal/metadata/generated_resource.go deleted file mode 100644 index e51961b1db39..000000000000 --- a/receiver/tlscheckreceiver/internal/metadata/generated_resource.go +++ /dev/null @@ -1,36 +0,0 @@ -// Code generated by mdatagen. DO NOT EDIT. - -package metadata - -import ( - "go.opentelemetry.io/collector/pdata/pcommon" -) - -// ResourceBuilder is a helper struct to build resources predefined in metadata.yaml. -// The ResourceBuilder is not thread-safe and must not to be used in multiple goroutines. -type ResourceBuilder struct { - config ResourceAttributesConfig - res pcommon.Resource -} - -// NewResourceBuilder creates a new ResourceBuilder. This method should be called on the start of the application. -func NewResourceBuilder(rac ResourceAttributesConfig) *ResourceBuilder { - return &ResourceBuilder{ - config: rac, - res: pcommon.NewResource(), - } -} - -// SetTlscheckURL sets provided value as "tlscheck.url" attribute. 
-func (rb *ResourceBuilder) SetTlscheckURL(val string) { - if rb.config.TlscheckURL.Enabled { - rb.res.Attributes().PutStr("tlscheck.url", val) - } -} - -// Emit returns the built resource and resets the internal builder state. -func (rb *ResourceBuilder) Emit() pcommon.Resource { - r := rb.res - rb.res = pcommon.NewResource() - return r -} diff --git a/receiver/tlscheckreceiver/internal/metadata/generated_resource_test.go b/receiver/tlscheckreceiver/internal/metadata/generated_resource_test.go deleted file mode 100644 index 4a67d0fd5ad5..000000000000 --- a/receiver/tlscheckreceiver/internal/metadata/generated_resource_test.go +++ /dev/null @@ -1,40 +0,0 @@ -// Code generated by mdatagen. DO NOT EDIT. - -package metadata - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestResourceBuilder(t *testing.T) { - for _, tt := range []string{"default", "all_set", "none_set"} { - t.Run(tt, func(t *testing.T) { - cfg := loadResourceAttributesConfig(t, tt) - rb := NewResourceBuilder(cfg) - rb.SetTlscheckURL("tlscheck.url-val") - - res := rb.Emit() - assert.Equal(t, 0, rb.Emit().Attributes().Len()) // Second call should return empty Resource - - switch tt { - case "default": - assert.Equal(t, 1, res.Attributes().Len()) - case "all_set": - assert.Equal(t, 1, res.Attributes().Len()) - case "none_set": - assert.Equal(t, 0, res.Attributes().Len()) - return - default: - assert.Failf(t, "unexpected test case: %s", tt) - } - - val, ok := res.Attributes().Get("tlscheck.url") - assert.True(t, ok) - if ok { - assert.EqualValues(t, "tlscheck.url-val", val.Str()) - } - }) - } -} diff --git a/receiver/tlscheckreceiver/internal/metadata/testdata/config.yaml b/receiver/tlscheckreceiver/internal/metadata/testdata/config.yaml index 7dc13e51f71c..b8974d99aa9d 100644 --- a/receiver/tlscheckreceiver/internal/metadata/testdata/config.yaml +++ b/receiver/tlscheckreceiver/internal/metadata/testdata/config.yaml @@ -3,25 +3,7 @@ all_set: metrics: tlscheck.time_left: enabled: true - resource_attributes: - tlscheck.url: - enabled: true none_set: metrics: tlscheck.time_left: enabled: false - resource_attributes: - tlscheck.url: - enabled: false -filter_set_include: - resource_attributes: - tlscheck.url: - enabled: true - metrics_include: - - regexp: ".*" -filter_set_exclude: - resource_attributes: - tlscheck.url: - enabled: true - metrics_exclude: - - strict: "tlscheck.url-val" diff --git a/receiver/tlscheckreceiver/metadata.yaml b/receiver/tlscheckreceiver/metadata.yaml index 843444b4c35f..c5cad04d1a24 100644 --- a/receiver/tlscheckreceiver/metadata.yaml +++ b/receiver/tlscheckreceiver/metadata.yaml @@ -8,14 +8,13 @@ status: codeowners: active: [atoulme, michael-burt] - resource_attributes: - tlscheck.url: - enabled: true - description: Url at which the certificate was accessed. - type: string attributes: + tlscheck.host: + enabled: true + description: Host at which the certificate was accessed. + type: string tlscheck.x509.issuer: enabled: true description: The entity that issued the certificate. 
@@ -32,4 +31,4 @@ metrics: gauge: value_type: int unit: "s" - attributes: [tlscheck.x509.issuer, tlscheck.x509.cn] \ No newline at end of file + attributes: [tlscheck.x509.issuer, tlscheck.x509.cn, tlscheck.host] \ No newline at end of file diff --git a/receiver/tlscheckreceiver/scraper.go b/receiver/tlscheckreceiver/scraper.go index c4807cc78eff..4dda9ce86c36 100644 --- a/receiver/tlscheckreceiver/scraper.go +++ b/receiver/tlscheckreceiver/scraper.go @@ -5,7 +5,16 @@ package tlscheckreceiver // import "github.com/open-telemetry/opentelemetry-coll import ( "context" + "crypto/tls" + "errors" + "sync" + "time" + // "fmt" + // "net" + + "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/pmetric" "go.opentelemetry.io/collector/receiver" "go.uber.org/zap" @@ -13,19 +22,71 @@ import ( "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/tlscheckreceiver/internal/metadata" ) +var ( + ErrMissingTargets = errors.New(`No targets specified`) +) + type scraper struct { - // include string - logger *zap.Logger - mb *metadata.MetricsBuilder + cfg *Config + settings component.TelemetrySettings + mb *metadata.MetricsBuilder + getConnectionState func(host string) (tls.ConnectionState, error) +} + +func getConnectionState(host string) (tls.ConnectionState, error) { + conn, err := tls.Dial("tcp", host, &tls.Config{InsecureSkipVerify: true}) + if err != nil { + return tls.ConnectionState{}, err + } + defer conn.Close() + return conn.ConnectionState(), nil } func (s *scraper) scrape(_ context.Context) (pmetric.Metrics, error) { - return pmetric.NewMetrics(), nil + if s.cfg == nil || len(s.cfg.Targets) == 0 { + return pmetric.NewMetrics(), ErrMissingTargets + } + + var wg sync.WaitGroup + wg.Add(len(s.cfg.Targets)) + var mux sync.Mutex + for _, target := range s.cfg.Targets { + go func(host string) { + defer wg.Done() + + now := pcommon.NewTimestampFromTime(time.Now()) + mux.Lock() + state, err := s.getConnectionState(target.Host) + if err != nil { + s.settings.Logger.Error("TCP connection error encountered", zap.String("host", target.Host), zap.Error(err)) + } + + s.settings.Logger.Error("Peer Certificates", zap.Int("certificates_count", len(state.PeerCertificates))) + if len(state.PeerCertificates) == 0 { + s.settings.Logger.Error("No TLS certificates found. 
Verify the host serves TLS certificates.", zap.String("host", target.Host)) + } + + cert := state.PeerCertificates[0] + issuer := cert.Issuer.String() + commonName := cert.Subject.CommonName + currentTime := time.Now() + timeLeft := cert.NotAfter.Sub(currentTime).Seconds() + timeLeftInt := int64(timeLeft) + s.mb.RecordTlscheckTimeLeftDataPoint(now, timeLeftInt, issuer, commonName, host) + mux.Unlock() + + }(target.Host) + } + + wg.Wait() + return s.mb.Emit(), nil } -func newScraper(cfg *Config, settings receiver.Settings) *scraper { +func newScraper(cfg *Config, settings receiver.Settings, getConnectionState func(host string) (tls.ConnectionState, error)) *scraper { return &scraper{ - logger: settings.TelemetrySettings.Logger, - mb: metadata.NewMetricsBuilder(cfg.MetricsBuilderConfig, settings), + cfg: cfg, + settings: settings.TelemetrySettings, + mb: metadata.NewMetricsBuilder(metadata.DefaultMetricsBuilderConfig(), settings), + getConnectionState: getConnectionState, // Function to get ConnectionState } } diff --git a/receiver/tlscheckreceiver/scraper_test.go b/receiver/tlscheckreceiver/scraper_test.go new file mode 100644 index 000000000000..2b3f7a057244 --- /dev/null +++ b/receiver/tlscheckreceiver/scraper_test.go @@ -0,0 +1,152 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package tlscheckreceiver + +import ( + "context" + "crypto/tls" + "crypto/x509" + "crypto/x509/pkix" + + // "net" + // "fmt" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.opentelemetry.io/collector/receiver/receivertest" + // "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/tlscheckreceiver/internal/metadata" +) + +func mockGetConnectionStateValid(host string) (tls.ConnectionState, error) { + // Simulate a successful certificate retrieval + cert := &x509.Certificate{ + NotBefore: time.Now().Add(-1 * time.Hour), + NotAfter: time.Now().Add(24 * time.Hour), // Cert valid for 24 more hours + Subject: pkix.Name{CommonName: "valid.com"}, + Issuer: pkix.Name{CommonName: "ValidIssuer"}, + } + + return tls.ConnectionState{ + PeerCertificates: []*x509.Certificate{cert}, + }, nil +} + +func mockGetConnectionStateExpired(host string) (tls.ConnectionState, error) { + cert := &x509.Certificate{ + NotBefore: time.Now().Add(-48 * time.Hour), + NotAfter: time.Now().Add(-24 * time.Hour), // Cert expired 24 hours ago + Subject: pkix.Name{CommonName: "expired.com"}, + Issuer: pkix.Name{CommonName: "ExpiredIssuer"}, + } + + return tls.ConnectionState{ + PeerCertificates: []*x509.Certificate{cert}, + }, nil +} + +func mockGetConnectionStateNotYetValid(host string) (tls.ConnectionState, error) { + cert := &x509.Certificate{ + NotBefore: time.Now().Add(48 * time.Hour), + NotAfter: time.Now().Add(24 * time.Hour), // Cert expired 24 hours ago + Subject: pkix.Name{CommonName: "notyetvalid.com"}, + Issuer: pkix.Name{CommonName: "NotYetValidIssuer"}, + } + + return tls.ConnectionState{ + PeerCertificates: []*x509.Certificate{cert}, + }, nil +} + +func TestScrape_ValidCertificate(t *testing.T) { + cfg := &Config{ + Targets: []*targetConfig{ + {Host: "example.com:443"}, + }, + } + settings := receivertest.NewNopSettings() + s := newScraper(cfg, settings, mockGetConnectionStateValid) + + metrics, err := s.scrape(context.Background()) + require.NoError(t, err) + + assert.Equal(t, 1, metrics.DataPointCount()) + + rm := metrics.ResourceMetrics().At(0) + ilms := rm.ScopeMetrics().At(0) + metric := ilms.Metrics().At(0) + dp := 
metric.Gauge().DataPoints().At(0) + + attributes := dp.Attributes() + issuer, _ := attributes.Get("tlscheck.x509.issuer") + commonName, _ := attributes.Get("tlscheck.x509.cn") + + assert.Equal(t, "CN=ValidIssuer", issuer.AsString()) + assert.Equal(t, "valid.com", commonName.AsString()) +} + +func TestScrape_ExpiredCertificate(t *testing.T) { + cfg := &Config{ + Targets: []*targetConfig{ + {Host: "expired.com:443"}, + }, + } + settings := receivertest.NewNopSettings() + s := newScraper(cfg, settings, mockGetConnectionStateExpired) + + metrics, err := s.scrape(context.Background()) + require.NoError(t, err) + + assert.Equal(t, 1, metrics.DataPointCount()) + + // Additional checks for issuer and common name + rm := metrics.ResourceMetrics().At(0) + ilms := rm.ScopeMetrics().At(0) + metric := ilms.Metrics().At(0) + dp := metric.Gauge().DataPoints().At(0) + + attributes := dp.Attributes() + issuer, _ := attributes.Get("tlscheck.x509.issuer") + commonName, _ := attributes.Get("tlscheck.x509.cn") + + assert.Equal(t, "CN=ExpiredIssuer", issuer.AsString()) + assert.Equal(t, "expired.com", commonName.AsString()) + + // Ensure that timeLeft is negative for an expired cert + timeLeft := dp.IntValue() + assert.Less(t, timeLeft, int64(0), "Time left should be negative for an expired certificate") +} + +func TestScrape_NotYetValidCertificate(t *testing.T) { + cfg := &Config{ + Targets: []*targetConfig{ + {Host: "expired.com:443"}, + }, + } + settings := receivertest.NewNopSettings() + s := newScraper(cfg, settings, mockGetConnectionStateNotYetValid) + + metrics, err := s.scrape(context.Background()) + require.NoError(t, err) + + assert.Equal(t, 1, metrics.DataPointCount()) + + // Additional checks for issuer and common name + rm := metrics.ResourceMetrics().At(0) + ilms := rm.ScopeMetrics().At(0) + metric := ilms.Metrics().At(0) + dp := metric.Gauge().DataPoints().At(0) + + attributes := dp.Attributes() + issuer, _ := attributes.Get("tlscheck.x509.issuer") + commonName, _ := attributes.Get("tlscheck.x509.cn") + + assert.Equal(t, "CN=NotYetValidIssuer", issuer.AsString()) + assert.Equal(t, "notyetvalid.com", commonName.AsString()) + + // Ensure that timeLeft is positive for a not-yet-valid cert + timeLeft := dp.IntValue() + assert.Greater(t, timeLeft, int64(0), "Time left should be positive for a not-yet-valid cert") +}
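
The scrape loop introduced in `scraper.go` above records `tlscheck.time_left` from the first peer certificate returned by `tls.Dial`. As a minimal standalone sketch of that per-target check — with a hypothetical guard added so that a handshake yielding no peer certificates is surfaced as an error rather than indexed — it could look like the following; the function name `checkHost` and the error wording are illustrative and not part of the patch:

```go
package main

import (
	"crypto/tls"
	"errors"
	"fmt"
	"time"
)

// checkHost dials host (in "host:port" form), skips chain verification the same
// way the receiver's getConnectionState does, and returns the issuer, common
// name, and seconds remaining until the leaf certificate's NotAfter.
// The empty-certificate guard is an illustrative addition, not code from the patch.
func checkHost(host string) (issuer, commonName string, secondsLeft int64, err error) {
	conn, err := tls.Dial("tcp", host, &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		return "", "", 0, fmt.Errorf("TCP/TLS connection error for %s: %w", host, err)
	}
	defer conn.Close()

	state := conn.ConnectionState()
	if len(state.PeerCertificates) == 0 {
		return "", "", 0, errors.New("no TLS certificates presented by " + host)
	}

	cert := state.PeerCertificates[0]
	secondsLeft = int64(time.Until(cert.NotAfter).Seconds())
	return cert.Issuer.String(), cert.Subject.CommonName, secondsLeft, nil
}

func main() {
	issuer, cn, left, err := checkHost("example.com:443")
	if err != nil {
		fmt.Println("check failed:", err)
		return
	}
	// A negative time_left means the certificate has already expired,
	// matching the expired-certificate case exercised in scraper_test.go.
	fmt.Printf("issuer=%s cn=%s time_left=%ds\n", issuer, cn, left)
}
```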