
Commit

Deprecate Scraper.ID func, pass type when registering Scraper (open-telemetry#11659)

Changes interface to get closer to
open-telemetry#11657

Signed-off-by: Bogdan Drutu <[email protected]>
bogdandrutu authored Nov 13, 2024
1 parent 1f93154 commit 60d3160
Showing 4 changed files with 75 additions and 22 deletions.
25 changes: 25 additions & 0 deletions .chloggen/rm-scraper-id.yaml
@@ -0,0 +1,25 @@
# Use this changelog template to create an entry for release notes.

# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
change_type: deprecation

# The name of the component, or a single word describing the area of concern, (e.g. otlpreceiver)
component: scraperhelper

# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
note: Deprecate Scraper.ID func, pass type when registering Scraper

# One or more tracking issues or pull requests related to the change
issues: [11238]

# (Optional) One or more lines of additional information to render under the primary note.
# These lines will be padded with 2 spaces and then inserted directly into the document.
# Use pipe (|) for multiline entries.
subtext:

# Optional: The change log or logs in which this entry should be included.
# e.g. '[user]' or '[user, api]'
# Include 'user' if the change is relevant to end users.
# Include 'api' if there is a change to a library API.
# Default: '[user]'
change_logs: [api]
21 changes: 18 additions & 3 deletions receiver/scraperhelper/scraper.go
@@ -24,7 +24,7 @@ func (sf ScrapeFunc) Scrape(ctx context.Context) (pmetric.Metrics, error) {
type Scraper interface {
component.Component

// ID returns the scraper id.
// Deprecated: [v0.114.0] use AddScraperWithType.
ID() component.ID
Scrape(context.Context) (pmetric.Metrics, error)
}
@@ -67,8 +67,7 @@ func (b *baseScraper) ID() component.ID {
return b.id
}

// NewScraper creates a Scraper that calls Scrape at the specified collection interval,
// reports observability information, and passes the scraped metrics to the next consumer.
// Deprecated: [v0.114.0] use NewScraperWithoutType.
func NewScraper(t component.Type, scrape ScrapeFunc, options ...ScraperOption) (Scraper, error) {
if scrape == nil {
return nil, errNilFunc
@@ -83,3 +82,19 @@ func NewScraper(t component.Type, scrape ScrapeFunc, options ...ScraperOption) (

return bs, nil
}

// NewScraperWithoutType creates a Scraper that calls Scrape at the specified collection interval,
// reports observability information, and passes the scraped metrics to the next consumer.
func NewScraperWithoutType(scrape ScrapeFunc, options ...ScraperOption) (Scraper, error) {
if scrape == nil {
return nil, errNilFunc
}
bs := &baseScraper{
ScrapeFunc: scrape,
}
for _, op := range options {
op.apply(bs)
}

return bs, nil
}
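For downstream receivers, the construction side of this migration looks roughly like the sketch below. This is a minimal illustration only: the package name, scrapeFn, and buildScraper are hypothetical, and the import paths assume the go.opentelemetry.io/collector module layout around v0.114.

```go
package myreceiver

import (
	"context"

	"go.opentelemetry.io/collector/pdata/pmetric"
	"go.opentelemetry.io/collector/receiver/scraperhelper"
)

// scrapeFn is a stand-in ScrapeFunc; a real scraper would collect metrics
// from its target here.
func scrapeFn(context.Context) (pmetric.Metrics, error) {
	return pmetric.NewMetrics(), nil
}

func buildScraper() (scraperhelper.Scraper, error) {
	// Deprecated as of this commit:
	//   scraperhelper.NewScraper(component.MustNewType("myscraper"), scrapeFn)
	// The scraper no longer carries its own type; the type is supplied later,
	// when the scraper is registered on the controller via AddScraperWithType.
	return scraperhelper.NewScraperWithoutType(scrapeFn)
}
```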
25 changes: 19 additions & 6 deletions receiver/scraperhelper/scrapercontroller.go
@@ -30,14 +30,22 @@ func (of scraperControllerOptionFunc) apply(e *controller) {
of(e)
}

// AddScraper configures the provided scrape function to be called
// Deprecated: [v0.114.0] use AddScraperWithType.
func AddScraper(scraper Scraper) ScraperControllerOption {
return AddScraperWithType(scraper.ID().Type(), scraper)
}

// AddScraperWithType configures the provided scrape function to be called
// with the specified options, and at the specified collection interval.
//
// Observability information will be reported, and the scraped metrics
// will be passed to the next consumer.
func AddScraper(scraper Scraper) ScraperControllerOption {
func AddScraperWithType(t component.Type, scraper Scraper) ScraperControllerOption {
return scraperControllerOptionFunc(func(o *controller) {
o.scrapers = append(o.scrapers, scraper)
o.scrapers = append(o.scrapers, scraperWithID{
Scraper: scraper,
id: component.NewID(t),
})
})
}

@@ -58,7 +66,7 @@ type controller struct {
timeout time.Duration
nextConsumer consumer.Metrics

scrapers []Scraper
scrapers []scraperWithID
obsScrapers []*obsReport

tickerCh <-chan time.Time
@@ -69,6 +77,11 @@
obsrecv *receiverhelper.ObsReport
}

type scraperWithID struct {
Scraper
id component.ID
}

// NewScraperControllerReceiver creates a Receiver with the configured options, that can control multiple scrapers.
func NewScraperControllerReceiver(
cfg *ControllerConfig,
@@ -104,7 +117,7 @@ func NewScraperControllerReceiver(
for i, scraper := range sc.scrapers {
sc.obsScrapers[i], err = newScraper(obsReportSettings{
ReceiverID: sc.id,
Scraper: scraper.ID(),
Scraper: scraper.id,
ReceiverCreateSettings: set,
})
if err != nil {
@@ -190,7 +203,7 @@ func (sc *controller) scrapeMetricsAndReport() {
md, err := scraper.Scrape(ctx)

if err != nil {
sc.logger.Error("Error scraping metrics", zap.Error(err), zap.Stringer("scraper", scraper.ID()))
sc.logger.Error("Error scraping metrics", zap.Error(err), zap.Stringer("scraper", scraper.id))
if !scrapererror.IsPartialScrapeError(err) {
scrp.EndMetricsOp(ctx, 0, err)
continue
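On the registration side, the type now travels with the AddScraperWithType option rather than with the scraper itself, and the controller derives the component.ID it uses for obs reports and error logs via component.NewID(t). Below is a hedged sketch of the wiring, reusing the nop settings and metrics sink that also appear in the test diff that follows; buildAndStart, the config values, and the "myscraper" type are hypothetical.

```go
package myreceiver

import (
	"context"
	"time"

	"go.opentelemetry.io/collector/component"
	"go.opentelemetry.io/collector/component/componenttest"
	"go.opentelemetry.io/collector/consumer/consumertest"
	"go.opentelemetry.io/collector/pdata/pmetric"
	"go.opentelemetry.io/collector/receiver/receivertest"
	"go.opentelemetry.io/collector/receiver/scraperhelper"
)

func buildAndStart() error {
	scp, err := scraperhelper.NewScraperWithoutType(func(context.Context) (pmetric.Metrics, error) {
		return pmetric.NewMetrics(), nil
	})
	if err != nil {
		return err
	}

	cfg := &scraperhelper.ControllerConfig{CollectionInterval: time.Minute}

	// Previously: AddScraper(scp), with the type read back from scp.ID().
	// Now the type is passed explicitly; internally the controller pairs the
	// scraper with component.NewID(t) (see scraperWithID above).
	recv, err := scraperhelper.NewScraperControllerReceiver(
		cfg,
		receivertest.NewNopSettings(),  // placeholder settings, as in the tests below
		new(consumertest.MetricsSink),  // placeholder next consumer, as in the tests below
		scraperhelper.AddScraperWithType(component.MustNewType("myscraper"), scp),
	)
	if err != nil {
		return err
	}
	return recv.Start(context.Background(), componenttest.NewNopHost())
}
```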
26 changes: 13 additions & 13 deletions receiver/scraperhelper/scrapercontroller_test.go
@@ -212,10 +212,10 @@ func configureMetricOptions(t *testing.T, test metricsTestCase, initializeChs []

scrapeMetricsChs[i] = make(chan int)
tsm := &testScrapeMetrics{ch: scrapeMetricsChs[i], err: test.scrapeErr}
scp, err := NewScraper(component.MustNewType("scraper"), tsm.scrape, scraperOptions...)
scp, err := NewScraperWithoutType(tsm.scrape, scraperOptions...)
require.NoError(t, err)

metricOptions = append(metricOptions, AddScraper(scp))
metricOptions = append(metricOptions, AddScraperWithType(component.MustNewType("scraper"), scp))
}

return metricOptions
@@ -314,20 +314,20 @@ func TestSingleScrapePerInterval(t *testing.T) {

tickerCh := make(chan time.Time)

scp, err := NewScraper(component.MustNewType("scaper"), tsm.scrape)
scp, err := NewScraperWithoutType(tsm.scrape)
require.NoError(t, err)

receiver, err := NewScraperControllerReceiver(
recv, err := NewScraperControllerReceiver(
cfg,
receivertest.NewNopSettings(),
new(consumertest.MetricsSink),
AddScraper(scp),
AddScraperWithType(component.MustNewType("scaper"), scp),
WithTickerChannel(tickerCh),
)
require.NoError(t, err)

require.NoError(t, receiver.Start(context.Background(), componenttest.NewNopHost()))
defer func() { require.NoError(t, receiver.Shutdown(context.Background())) }()
require.NoError(t, recv.Start(context.Background(), componenttest.NewNopHost()))
defer func() { require.NoError(t, recv.Shutdown(context.Background())) }()

tickerCh <- time.Now()

@@ -356,7 +356,7 @@ func TestScrapeControllerStartsOnInit(t *testing.T) {
ch: make(chan int, 1),
}

scp, err := NewScraper(component.MustNewType("scraper"), tsm.scrape)
scp, err := NewScraperWithoutType(tsm.scrape)
require.NoError(t, err, "Must not error when creating scraper")

r, err := NewScraperControllerReceiver(
@@ -366,7 +366,7 @@
},
receivertest.NewNopSettings(),
new(consumertest.MetricsSink),
AddScraper(scp),
AddScraperWithType(component.MustNewType("scaper"), scp),
)
require.NoError(t, err, "Must not error when creating scrape controller")

@@ -392,7 +392,7 @@ func TestScrapeControllerInitialDelay(t *testing.T) {
}
)

scp, err := NewScraper(component.MustNewType("timed"), func(context.Context) (pmetric.Metrics, error) {
scp, err := NewScraperWithoutType(func(context.Context) (pmetric.Metrics, error) {
elapsed <- time.Now()
return pmetric.NewMetrics(), nil
})
@@ -402,7 +402,7 @@
&cfg,
receivertest.NewNopSettings(),
new(consumertest.MetricsSink),
AddScraper(scp),
AddScraperWithType(component.MustNewType("scaper"), scp),
)
require.NoError(t, err, "Must not error when creating receiver")

Expand All @@ -421,7 +421,7 @@ func TestShutdownBeforeScrapeCanStart(t *testing.T) {
InitialDelay: 5 * time.Second,
}

scp, err := NewScraper(component.MustNewType("timed"), func(context.Context) (pmetric.Metrics, error) {
scp, err := NewScraperWithoutType(func(context.Context) (pmetric.Metrics, error) {
// make the scraper wait for long enough it would disrupt a shutdown.
time.Sleep(30 * time.Second)
return pmetric.NewMetrics(), nil
@@ -432,7 +432,7 @@
&cfg,
receivertest.NewNopSettings(),
new(consumertest.MetricsSink),
AddScraper(scp),
AddScraperWithType(component.MustNewType("scaper"), scp),
)
require.NoError(t, err, "Must not error when creating receiver")
require.NoError(t, r.Start(context.Background(), componenttest.NewNopHost()))