diff --git a/.chloggen/deprecate-scrapererror.yaml b/.chloggen/deprecate-scrapererror.yaml new file mode 100644 index 00000000000..16ca330237f --- /dev/null +++ b/.chloggen/deprecate-scrapererror.yaml @@ -0,0 +1,20 @@ +# Use this changelog template to create an entry for release notes. + +# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix' +change_type: 'deprecation' + +# The name of the component, or a single word describing the area of concern, (e.g. otlpreceiver) +component: receiver/scrapererror + +# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`). +note: Deprecate the package in favor of /receiver/scraper/scrapererror. + +# One or more tracking issues or pull requests related to the change +issues: [11003] + +# Optional: The change log or logs in which this entry should be included. +# e.g. '[user]' or '[user, api]' +# Include 'user' if the change is relevant to end users. +# Include 'api' if there is a change to a library API. +# Default: '[user]' +change_logs: [api] diff --git a/.chloggen/deprecate-scraperhelper.yaml b/.chloggen/deprecate-scraperhelper.yaml new file mode 100644 index 00000000000..9eac10c916f --- /dev/null +++ b/.chloggen/deprecate-scraperhelper.yaml @@ -0,0 +1,20 @@ +# Use this changelog template to create an entry for release notes. + +# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix' +change_type: 'deprecation' + +# The name of the component, or a single word describing the area of concern, (e.g. otlpreceiver) +component: receiver/scraperhelper + +# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`). +note: Deprecate the package in favor of /receiver/scraper/scraperhelper. + +# One or more tracking issues or pull requests related to the change +issues: [11003] + +# Optional: The change log or logs in which this entry should be included. +# e.g. 
'[user]' or '[user, api]' +# Include 'user' if the change is relevant to end users. +# Include 'api' if there is a change to a library API. +# Default: '[user]' +change_logs: [api] diff --git a/cmd/builder/test/core.builder.yaml b/cmd/builder/test/core.builder.yaml index e6aab6c3c6b..2ecc45e1a82 100644 --- a/cmd/builder/test/core.builder.yaml +++ b/cmd/builder/test/core.builder.yaml @@ -59,6 +59,7 @@ replaces: - go.opentelemetry.io/collector/processor => ${WORKSPACE_DIR}/processor - go.opentelemetry.io/collector/processor/processorprofiles => ${WORKSPACE_DIR}/processor/processorprofiles - go.opentelemetry.io/collector/receiver => ${WORKSPACE_DIR}/receiver + - go.opentelemetry.io/collector/receiver/scraper => ${WORKSPACE_DIR}/receiver/scraper - go.opentelemetry.io/collector/receiver/otlpreceiver => ${WORKSPACE_DIR}/receiver/otlpreceiver - go.opentelemetry.io/collector/receiver/receiverprofiles => ${WORKSPACE_DIR}/receiver/receiverprofiles - go.opentelemetry.io/collector/semconv => ${WORKSPACE_DIR}/semconv diff --git a/cmd/otelcorecol/builder-config.yaml b/cmd/otelcorecol/builder-config.yaml index e3f58c89e34..1a69c3d1285 100644 --- a/cmd/otelcorecol/builder-config.yaml +++ b/cmd/otelcorecol/builder-config.yaml @@ -92,6 +92,7 @@ replaces: - go.opentelemetry.io/collector/processor/memorylimiterprocessor => ../../processor/memorylimiterprocessor - go.opentelemetry.io/collector/processor/processorprofiles => ../../processor/processorprofiles - go.opentelemetry.io/collector/receiver => ../../receiver + - go.opentelemetry.io/collector/receiver/scraper => ../../receiver/scraper - go.opentelemetry.io/collector/receiver/nopreceiver => ../../receiver/nopreceiver - go.opentelemetry.io/collector/receiver/otlpreceiver => ../../receiver/otlpreceiver - go.opentelemetry.io/collector/receiver/receiverprofiles => ../../receiver/receiverprofiles diff --git a/cmd/otelcorecol/go.mod b/cmd/otelcorecol/go.mod index 0854c01ffad..f7fefa71767 100644 --- a/cmd/otelcorecol/go.mod +++ 
b/cmd/otelcorecol/go.mod @@ -254,6 +254,8 @@ replace go.opentelemetry.io/collector/processor/processorprofiles => ../../proce replace go.opentelemetry.io/collector/receiver => ../../receiver +replace go.opentelemetry.io/collector/receiver/scraper => ../../receiver/scraper + replace go.opentelemetry.io/collector/receiver/nopreceiver => ../../receiver/nopreceiver replace go.opentelemetry.io/collector/receiver/otlpreceiver => ../../receiver/otlpreceiver diff --git a/receiver/scraper/Makefile b/receiver/scraper/Makefile new file mode 100644 index 00000000000..c1496226e59 --- /dev/null +++ b/receiver/scraper/Makefile @@ -0,0 +1 @@ +include ../../Makefile.Common \ No newline at end of file diff --git a/receiver/scraper/go.mod b/receiver/scraper/go.mod new file mode 100644 index 00000000000..b16a651549c --- /dev/null +++ b/receiver/scraper/go.mod @@ -0,0 +1,46 @@ +module go.opentelemetry.io/collector/receiver/scraper + +go 1.22.0 + +require ( + github.com/stretchr/testify v1.9.0 + go.opentelemetry.io/collector/component v0.110.0 + go.opentelemetry.io/collector/consumer v0.110.0 + go.opentelemetry.io/collector/consumer/consumertest v0.110.0 + go.opentelemetry.io/collector/pdata v1.16.0 + go.opentelemetry.io/collector/pipeline v0.110.0 + go.opentelemetry.io/collector/receiver v0.110.0 + go.opentelemetry.io/otel v1.30.0 + go.opentelemetry.io/otel/metric v1.30.0 + go.opentelemetry.io/otel/sdk v1.30.0 + go.opentelemetry.io/otel/trace v1.30.0 + go.uber.org/goleak v1.3.0 + go.uber.org/multierr v1.11.0 + go.uber.org/zap v1.27.0 +) + +require ( + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/go-logr/logr v1.4.2 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/pmezard/go-difflib 
v1.0.0 // indirect + go.opentelemetry.io/collector/component/componentprofiles v0.110.0 // indirect + go.opentelemetry.io/collector/config/configtelemetry v0.110.0 // indirect + go.opentelemetry.io/collector/consumer/consumerprofiles v0.110.0 // indirect + go.opentelemetry.io/collector/internal/globalsignal v0.110.0 // indirect + go.opentelemetry.io/collector/pdata/pprofile v0.110.0 // indirect + go.opentelemetry.io/collector/receiver/receiverprofiles v0.110.0 // indirect + go.opentelemetry.io/otel/sdk/metric v1.30.0 // indirect + golang.org/x/net v0.28.0 // indirect + golang.org/x/sys v0.25.0 // indirect + golang.org/x/text v0.17.0 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20240822170219-fc7c04adadcd // indirect + google.golang.org/grpc v1.66.2 // indirect + google.golang.org/protobuf v1.34.2 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/receiver/scraper/go.sum b/receiver/scraper/go.sum new file mode 100644 index 00000000000..d99a960d0fc --- /dev/null +++ b/receiver/scraper/go.sum @@ -0,0 +1,122 @@ +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= 
+github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= +github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= +github.com/stretchr/objx v0.1.0/go.mod 
h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +go.opentelemetry.io/collector/component v0.110.0 h1:z7uSY/1dcK+vTY2z3v0XxeCoi2wqgHTow/ds3Gozuz4= +go.opentelemetry.io/collector/component v0.110.0/go.mod h1:W99gZdfGtQ5Zg6Bhrwrcl/uZcCG+2qBnZ1z2JO5WCW0= +go.opentelemetry.io/collector/component/componentprofiles v0.110.0 h1:YH43aYKPYfnC0TqgI+WlbHsJkTvQPO3ImJybK3oyxQ8= +go.opentelemetry.io/collector/component/componentprofiles v0.110.0/go.mod h1:ZDVFaOhCt6ce2u/HHwqxoV5f+8P2dh0Xut8laYRu4+o= +go.opentelemetry.io/collector/config/configtelemetry v0.110.0 h1:V8Y/Xv7TJpnNGHLrheRKrMydcKBxWYAZ+dj71Kllyos= +go.opentelemetry.io/collector/config/configtelemetry v0.110.0/go.mod h1:R0MBUxjSMVMIhljuDHWIygzzJWQyZHXXWIgQNxcFwhc= +go.opentelemetry.io/collector/consumer v0.110.0 h1:CnB83KSFQxhFAbNJwTM0blahg16xa6CyUnIIA5qPMbA= +go.opentelemetry.io/collector/consumer v0.110.0/go.mod h1:WlzhfwDfwKkEa5XFdN5x9+jjp9ZF5EUSmtOgVe69zm0= +go.opentelemetry.io/collector/consumer/consumerprofiles v0.110.0 h1:KlEGGPFmQN7CFbj8pkiD9J6L820kJgC0zYKqeSFGLEo= +go.opentelemetry.io/collector/consumer/consumerprofiles v0.110.0/go.mod h1:Br4qElhLqAYDMddroNox3CpNv+NxgPgNfGhxFXXxYIw= +go.opentelemetry.io/collector/consumer/consumertest v0.110.0 h1:/rOL4sJr4eSzOp5z6+R7MmuJ5UD6PFOs7S2FA7puE1g= +go.opentelemetry.io/collector/consumer/consumertest v0.110.0/go.mod h1:sKL3UwB6nyH/33JE173o755ekYPwlS/8fs8opTSgPiY= +go.opentelemetry.io/collector/internal/globalsignal v0.110.0 h1:S6bfFEiek8vJeXAbciWS7W8UR6ZrVJB3ftNyFTMHQaY= +go.opentelemetry.io/collector/internal/globalsignal 
v0.110.0/go.mod h1:GqMXodPWOxK5uqpX8MaMXC2389y2XJTa5nPwf8FYDK8= +go.opentelemetry.io/collector/pdata v1.16.0 h1:g02K8jlRnmQ7TQDuXpdgVL6vIxIVqr5Gbb1qIR27rto= +go.opentelemetry.io/collector/pdata v1.16.0/go.mod h1:YZZJIt2ehxosYf/Y1pbvexjNWsIGNNrzzlCTO9jC1F4= +go.opentelemetry.io/collector/pdata/pprofile v0.110.0 h1:DknuOGOdjYIzVnromhVcW5rWyyjPahf65UAfgXz1xfo= +go.opentelemetry.io/collector/pdata/pprofile v0.110.0/go.mod h1:B3GxiEt40ixeGq2r30AOB3dgKdC281rLw39IqgSTwSM= +go.opentelemetry.io/collector/pdata/testdata v0.110.0 h1:XUXuaKVpD5G7VvTlBCBo/7CDpHvbeWRLXN4zjol94kg= +go.opentelemetry.io/collector/pdata/testdata v0.110.0/go.mod h1:lvpGoQcVDbRjuH3caNIkQ+pkU/+MLKVV4MdNFcp5mxU= +go.opentelemetry.io/collector/pipeline v0.110.0 h1:nArQj8lt2R6ajbbmZ0f7JqkzAzvIOSwxsxDEv9HGKHw= +go.opentelemetry.io/collector/pipeline v0.110.0/go.mod h1:qWk90kohDYBgI/1Kw4DQaQU82+y9GJY8MDse7H2JTWg= +go.opentelemetry.io/collector/receiver v0.110.0 h1:uv+mCadEpWT7yoRvMil1bY44aZbZ7y4oAqsjvypf+t4= +go.opentelemetry.io/collector/receiver v0.110.0/go.mod h1:rTNskk6R+8bU4dlAB1IgdwkIiBl44+C6qcvdyarAyF0= +go.opentelemetry.io/collector/receiver/receiverprofiles v0.110.0 h1:QDbKYVQFlQJfo05qS8O0zyZghxeGmxlVUKIuIJQST6U= +go.opentelemetry.io/collector/receiver/receiverprofiles v0.110.0/go.mod h1:DsNqyNWfax62zb1y2ek2ERzrEAiaJocSfc+QLtHNnxI= +go.opentelemetry.io/otel v1.30.0 h1:F2t8sK4qf1fAmY9ua4ohFS/K+FUuOPemHUIXHtktrts= +go.opentelemetry.io/otel v1.30.0/go.mod h1:tFw4Br9b7fOS+uEao81PJjVMjW/5fvNCbpsDIXqP0pc= +go.opentelemetry.io/otel/metric v1.30.0 h1:4xNulvn9gjzo4hjg+wzIKG7iNFEaBMX00Qd4QIZs7+w= +go.opentelemetry.io/otel/metric v1.30.0/go.mod h1:aXTfST94tswhWEb+5QjlSqG+cZlmyXy/u8jFpor3WqQ= +go.opentelemetry.io/otel/sdk v1.30.0 h1:cHdik6irO49R5IysVhdn8oaiR9m8XluDaJAs4DfOrYE= +go.opentelemetry.io/otel/sdk v1.30.0/go.mod h1:p14X4Ok8S+sygzblytT1nqG98QG2KYKv++HE0LY/mhg= +go.opentelemetry.io/otel/sdk/metric v1.30.0 h1:QJLT8Pe11jyHBHfSAgYH7kEmT24eX792jZO1bo4BXkM= +go.opentelemetry.io/otel/sdk/metric v1.30.0/go.mod 
h1:waS6P3YqFNzeP01kuo/MBBYqaoBJl7efRQHOaydhy1Y= +go.opentelemetry.io/otel/trace v1.30.0 h1:7UBkkYzeg3C7kQX8VAidWh2biiQbtAKjyIML8dQ9wmc= +go.opentelemetry.io/otel/trace v1.30.0/go.mod h1:5EyKqTzzmyqB9bwtCCq6pDLktPK6fmGf/Dph+8VI02o= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= +go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= +go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= +go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.28.0 h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE= +golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34= +golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc= +golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240822170219-fc7c04adadcd h1:6TEm2ZxXoQmFWFlt1vNxvVOa1Q0dXFQD1m/rYjXmS0E= 
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240822170219-fc7c04adadcd/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU= +google.golang.org/grpc v1.66.2 h1:3QdXkuq3Bkh7w+ywLdLvM56cmGvQHUMZpiCzt6Rqaoo= +google.golang.org/grpc v1.66.2/go.mod h1:s3/l6xSSCURdVfAnL+TqCNMyTDAGN6+lZeVxnZR128Y= +google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= +google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/receiver/scraper/scrapererror/doc.go b/receiver/scraper/scrapererror/doc.go new file mode 100644 index 00000000000..96a11c7569a --- /dev/null +++ b/receiver/scraper/scrapererror/doc.go @@ -0,0 +1,5 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +// Package scrapererror provides custom error types for scrapers. 
+package scrapererror // import "go.opentelemetry.io/collector/receiver/scraper/scrapererror" diff --git a/receiver/scraper/scrapererror/package_test.go b/receiver/scraper/scrapererror/package_test.go new file mode 100644 index 00000000000..b414afd1d34 --- /dev/null +++ b/receiver/scraper/scrapererror/package_test.go @@ -0,0 +1,14 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package scrapererror + +import ( + "testing" + + "go.uber.org/goleak" +) + +func TestMain(m *testing.M) { + goleak.VerifyTestMain(m) +} diff --git a/receiver/scraper/scrapererror/partialscrapeerror.go b/receiver/scraper/scrapererror/partialscrapeerror.go new file mode 100644 index 00000000000..1b1ef13afb2 --- /dev/null +++ b/receiver/scraper/scrapererror/partialscrapeerror.go @@ -0,0 +1,28 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package scrapererror // import "go.opentelemetry.io/collector/receiver/scraper/scrapererror" + +import "errors" + +// PartialScrapeError is an error to represent +// that a subset of metrics were failed to be scraped. +type PartialScrapeError struct { + error + Failed int +} + +// NewPartialScrapeError creates PartialScrapeError for failed metrics. +// Use this error type only when a subset of data was failed to be scraped. +func NewPartialScrapeError(err error, failed int) PartialScrapeError { + return PartialScrapeError{ + error: err, + Failed: failed, + } +} + +// IsPartialScrapeError checks if an error was wrapped with PartialScrapeError. 
+func IsPartialScrapeError(err error) bool { + var partialScrapeErr PartialScrapeError + return errors.As(err, &partialScrapeErr) +} diff --git a/receiver/scraper/scrapererror/partialscrapeerror_test.go b/receiver/scraper/scrapererror/partialscrapeerror_test.go new file mode 100644 index 00000000000..d744a64abce --- /dev/null +++ b/receiver/scraper/scrapererror/partialscrapeerror_test.go @@ -0,0 +1,28 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package scrapererror + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestPartialScrapeError(t *testing.T) { + failed := 2 + err := errors.New("some error") + partialErr := NewPartialScrapeError(err, failed) + require.EqualError(t, err, partialErr.Error()) + assert.Equal(t, failed, partialErr.Failed) +} + +func TestIsPartialScrapeError(t *testing.T) { + err := errors.New("testError") + require.False(t, IsPartialScrapeError(err)) + + err = NewPartialScrapeError(err, 2) + require.True(t, IsPartialScrapeError(err)) +} diff --git a/receiver/scraper/scrapererror/scrapeerror.go b/receiver/scraper/scrapererror/scrapeerror.go new file mode 100644 index 00000000000..ac8eaaed86f --- /dev/null +++ b/receiver/scraper/scrapererror/scrapeerror.go @@ -0,0 +1,43 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package scrapererror // import "go.opentelemetry.io/collector/receiver/scraper/scrapererror" + +import ( + "go.uber.org/multierr" +) + +// ScrapeErrors contains multiple PartialScrapeErrors and can also contain generic errors. +type ScrapeErrors struct { + errs []error + failedScrapeCount int +} + +// AddPartial adds a PartialScrapeError with the provided failed count and error. +func (s *ScrapeErrors) AddPartial(failed int, err error) { + s.errs = append(s.errs, NewPartialScrapeError(err, failed)) + s.failedScrapeCount += failed +} + +// Add adds a regular error. 
+func (s *ScrapeErrors) Add(err error) { + s.errs = append(s.errs, err) +} + +// Combine converts a slice of errors into one error. +// It will return a PartialScrapeError if at least one error in the slice is a PartialScrapeError. +func (s *ScrapeErrors) Combine() error { + partialScrapeErr := false + for _, err := range s.errs { + if IsPartialScrapeError(err) { + partialScrapeErr = true + } + } + + combined := multierr.Combine(s.errs...) + if !partialScrapeErr { + return combined + } + + return NewPartialScrapeError(combined, s.failedScrapeCount) +} diff --git a/receiver/scraper/scrapererror/scrapeerror_test.go b/receiver/scraper/scrapererror/scrapeerror_test.go new file mode 100644 index 00000000000..7a47345013a --- /dev/null +++ b/receiver/scraper/scrapererror/scrapeerror_test.go @@ -0,0 +1,106 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package scrapererror + +import ( + "errors" + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestScrapeErrorsAddPartial(t *testing.T) { + err1 := errors.New("err 1") + err2 := errors.New("err 2") + expected := []error{ + PartialScrapeError{error: err1, Failed: 1}, + PartialScrapeError{error: err2, Failed: 10}, + } + + var errs ScrapeErrors + errs.AddPartial(1, err1) + errs.AddPartial(10, err2) + assert.Equal(t, expected, errs.errs) +} + +func TestScrapeErrorsAdd(t *testing.T) { + err1 := errors.New("err a") + err2 := errors.New("err b") + expected := []error{err1, err2} + + var errs ScrapeErrors + errs.Add(err1) + errs.Add(err2) + assert.Equal(t, expected, errs.errs) +} + +func TestScrapeErrorsCombine(t *testing.T) { + testCases := []struct { + errs func() ScrapeErrors + expectedErr string + expectedFailedCount int + expectNil bool + expectedScrape bool + }{ + { + errs: func() ScrapeErrors { + var errs ScrapeErrors + return errs + }, + expectNil: true, + }, + { + errs: func() ScrapeErrors { + var errs ScrapeErrors + 
errs.AddPartial(10, errors.New("bad scrapes")) + errs.AddPartial(1, fmt.Errorf("err: %w", errors.New("bad scrape"))) + return errs + }, + expectedErr: "bad scrapes; err: bad scrape", + expectedFailedCount: 11, + expectedScrape: true, + }, + { + errs: func() ScrapeErrors { + var errs ScrapeErrors + errs.Add(errors.New("bad regular")) + errs.Add(fmt.Errorf("err: %w", errors.New("bad reg"))) + return errs + }, + expectedErr: "bad regular; err: bad reg", + }, + { + errs: func() ScrapeErrors { + var errs ScrapeErrors + errs.AddPartial(2, errors.New("bad two scrapes")) + errs.AddPartial(10, fmt.Errorf("%d scrapes failed: %w", 10, errors.New("bad things happened"))) + errs.Add(errors.New("bad event")) + errs.Add(fmt.Errorf("event: %w", errors.New("something happened"))) + return errs + }, + expectedErr: "bad two scrapes; 10 scrapes failed: bad things happened; bad event; event: something happened", + expectedFailedCount: 12, + expectedScrape: true, + }, + } + + for _, tt := range testCases { + scrapeErrs := tt.errs() + if tt.expectNil { + require.NoError(t, scrapeErrs.Combine()) + continue + } + require.EqualError(t, scrapeErrs.Combine(), tt.expectedErr) + if tt.expectedScrape { + var partialScrapeErr PartialScrapeError + if !errors.As(scrapeErrs.Combine(), &partialScrapeErr) { + t.Errorf("%+v.Combine() = %q. Want: PartialScrapeError", scrapeErrs, scrapeErrs.Combine()) + } else if tt.expectedFailedCount != partialScrapeErr.Failed { + t.Errorf("%+v.Combine().Failed. Got %d Failed count. 
Want: %d", scrapeErrs, partialScrapeErr.Failed, tt.expectedFailedCount) + } + } + } +} diff --git a/receiver/scraper/scraperhelper/config.go b/receiver/scraper/scraperhelper/config.go new file mode 100644 index 00000000000..0efdf679006 --- /dev/null +++ b/receiver/scraper/scraperhelper/config.go @@ -0,0 +1,51 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package scraperhelper // import "go.opentelemetry.io/collector/receiver/scraper/scraperhelper" + +import ( + "errors" + "fmt" + "time" + + "go.uber.org/multierr" +) + +var ( + errNonPositiveInterval = errors.New("requires positive value") +) + +// ControllerConfig defines common settings for a scraper controller +// configuration. Scraper controller receivers can embed this struct, instead +// of receiver.Settings, and extend it with more fields if needed. +type ControllerConfig struct { + // CollectionInterval sets how frequently the scraper + // should be called and used as the context timeout + // to ensure that scrapers don't exceed the interval. + CollectionInterval time.Duration `mapstructure:"collection_interval"` + // InitialDelay sets the initial start delay for the scraper, + // any non positive value is assumed to be immediately. + InitialDelay time.Duration `mapstructure:"initial_delay"` + // Timeout is an optional value used to set scraper's context deadline. + Timeout time.Duration `mapstructure:"timeout"` +} + +// NewDefaultControllerConfig returns default scraper controller +// settings with a collection interval of one minute. 
+func NewDefaultControllerConfig() ControllerConfig { + return ControllerConfig{ + CollectionInterval: time.Minute, + InitialDelay: time.Second, + Timeout: 0, + } +} + +func (set *ControllerConfig) Validate() (errs error) { + if set.CollectionInterval <= 0 { + errs = multierr.Append(errs, fmt.Errorf(`"collection_interval": %w`, errNonPositiveInterval)) + } + if set.Timeout < 0 { + errs = multierr.Append(errs, fmt.Errorf(`"timeout": %w`, errNonPositiveInterval)) + } + return errs +} diff --git a/receiver/scraper/scraperhelper/config_test.go b/receiver/scraper/scraperhelper/config_test.go new file mode 100644 index 00000000000..1da08e78006 --- /dev/null +++ b/receiver/scraper/scraperhelper/config_test.go @@ -0,0 +1,52 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package scraperhelper + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestScrapeControllerSettings(t *testing.T) { + t.Parallel() + + for _, tc := range []struct { + name string + set ControllerConfig + errVal string + }{ + { + name: "default configuration", + set: NewDefaultControllerConfig(), + errVal: "", + }, + { + name: "zero value configuration", + set: ControllerConfig{}, + errVal: `"collection_interval": requires positive value`, + }, + { + name: "invalid timeout", + set: ControllerConfig{ + CollectionInterval: time.Minute, + Timeout: -1 * time.Minute, + }, + errVal: `"timeout": requires positive value`, + }, + } { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + err := tc.set.Validate() + if tc.errVal == "" { + assert.NoError(t, err, "Must not error") + return + } + assert.EqualError(t, err, tc.errVal, "Must match the expected error") + }) + } +} diff --git a/receiver/scraper/scraperhelper/doc.go b/receiver/scraper/scraperhelper/doc.go new file mode 100644 index 00000000000..03ef439433e --- /dev/null +++ b/receiver/scraper/scraperhelper/doc.go @@ -0,0 +1,7 @@ +// Copyright The OpenTelemetry Authors +// 
SPDX-License-Identifier: Apache-2.0 + +//go:generate mdatagen metadata.yaml + +// Package scraperhelper provides utilities for scrapers. +package scraperhelper // import "go.opentelemetry.io/collector/receiver/scraper/scraperhelper" diff --git a/receiver/scraper/scraperhelper/documentation.md b/receiver/scraper/scraperhelper/documentation.md new file mode 100644 index 00000000000..56ad47a5105 --- /dev/null +++ b/receiver/scraper/scraperhelper/documentation.md @@ -0,0 +1,23 @@ +[comment]: <> (Code generated by mdatagen. DO NOT EDIT.) + +# scraperhelper + +## Internal Telemetry + +The following telemetry is emitted by this component. + +### otelcol_scraper_errored_metric_points + +Number of metric points that were unable to be scraped. + +| Unit | Metric Type | Value Type | Monotonic | +| ---- | ----------- | ---------- | --------- | +| {datapoints} | Sum | Int | true | + +### otelcol_scraper_scraped_metric_points + +Number of metric points successfully scraped. + +| Unit | Metric Type | Value Type | Monotonic | +| ---- | ----------- | ---------- | --------- | +| {datapoints} | Sum | Int | true | diff --git a/receiver/scraper/scraperhelper/generated_component_telemetry_test.go b/receiver/scraper/scraperhelper/generated_component_telemetry_test.go new file mode 100644 index 00000000000..0610d16424b --- /dev/null +++ b/receiver/scraper/scraperhelper/generated_component_telemetry_test.go @@ -0,0 +1,81 @@ +// Code generated by mdatagen. DO NOT EDIT. 
+ +package scraperhelper + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" + "go.opentelemetry.io/otel/metric" + sdkmetric "go.opentelemetry.io/otel/sdk/metric" + "go.opentelemetry.io/otel/sdk/metric/metricdata" + "go.opentelemetry.io/otel/sdk/metric/metricdata/metricdatatest" + + "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/config/configtelemetry" + "go.opentelemetry.io/collector/receiver" + "go.opentelemetry.io/collector/receiver/receivertest" +) + +type componentTestTelemetry struct { + reader *sdkmetric.ManualReader + meterProvider *sdkmetric.MeterProvider +} + +func (tt *componentTestTelemetry) NewSettings() receiver.Settings { + settings := receivertest.NewNopSettings() + settings.MeterProvider = tt.meterProvider + settings.LeveledMeterProvider = func(_ configtelemetry.Level) metric.MeterProvider { + return tt.meterProvider + } + settings.ID = component.NewID(component.MustNewType("scraperhelper")) + + return settings +} + +func setupTestTelemetry() componentTestTelemetry { + reader := sdkmetric.NewManualReader() + return componentTestTelemetry{ + reader: reader, + meterProvider: sdkmetric.NewMeterProvider(sdkmetric.WithReader(reader)), + } +} + +func (tt *componentTestTelemetry) assertMetrics(t *testing.T, expected []metricdata.Metrics) { + var md metricdata.ResourceMetrics + require.NoError(t, tt.reader.Collect(context.Background(), &md)) + // ensure all required metrics are present + for _, want := range expected { + got := tt.getMetric(want.Name, md) + metricdatatest.AssertEqual(t, want, got, metricdatatest.IgnoreTimestamp()) + } + + // ensure no additional metrics are emitted + require.Equal(t, len(expected), tt.len(md)) +} + +func (tt *componentTestTelemetry) getMetric(name string, got metricdata.ResourceMetrics) metricdata.Metrics { + for _, sm := range got.ScopeMetrics { + for _, m := range sm.Metrics { + if m.Name == name { + return m + } + } + } + + return metricdata.Metrics{} +} + +func (tt 
*componentTestTelemetry) len(got metricdata.ResourceMetrics) int { + metricsCount := 0 + for _, sm := range got.ScopeMetrics { + metricsCount += len(sm.Metrics) + } + + return metricsCount +} + +func (tt *componentTestTelemetry) Shutdown(ctx context.Context) error { + return tt.meterProvider.Shutdown(ctx) +} diff --git a/cmd/mdatagen/generated_package_test.go b/receiver/scraper/scraperhelper/generated_package_test.go similarity index 87% rename from cmd/mdatagen/generated_package_test.go rename to receiver/scraper/scraperhelper/generated_package_test.go index 2a21d3c7c86..b88f94c44a0 100644 --- a/cmd/mdatagen/generated_package_test.go +++ b/receiver/scraper/scraperhelper/generated_package_test.go @@ -1,6 +1,6 @@ // Code generated by mdatagen. DO NOT EDIT. -package main +package scraperhelper import ( "testing" diff --git a/receiver/scraper/scraperhelper/internal/metadata/generated_telemetry.go b/receiver/scraper/scraperhelper/internal/metadata/generated_telemetry.go new file mode 100644 index 00000000000..78afae9e9bb --- /dev/null +++ b/receiver/scraper/scraperhelper/internal/metadata/generated_telemetry.go @@ -0,0 +1,70 @@ +// Code generated by mdatagen. DO NOT EDIT. + +package metadata + +import ( + "errors" + + "go.opentelemetry.io/otel/metric" + "go.opentelemetry.io/otel/trace" + + "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/config/configtelemetry" +) + +// Deprecated: [v0.108.0] use LeveledMeter instead. 
+func Meter(settings component.TelemetrySettings) metric.Meter { + return settings.MeterProvider.Meter("go.opentelemetry.io/collector/receiver/scraper/scraperhelper") +} + +func LeveledMeter(settings component.TelemetrySettings, level configtelemetry.Level) metric.Meter { + return settings.LeveledMeterProvider(level).Meter("go.opentelemetry.io/collector/receiver/scraper/scraperhelper") +} + +func Tracer(settings component.TelemetrySettings) trace.Tracer { + return settings.TracerProvider.Tracer("go.opentelemetry.io/collector/receiver/scraper/scraperhelper") +} + +// TelemetryBuilder provides an interface for components to report telemetry +// as defined in metadata and user config. +type TelemetryBuilder struct { + meter metric.Meter + ScraperErroredMetricPoints metric.Int64Counter + ScraperScrapedMetricPoints metric.Int64Counter + meters map[configtelemetry.Level]metric.Meter +} + +// TelemetryBuilderOption applies changes to default builder. +type TelemetryBuilderOption interface { + apply(*TelemetryBuilder) +} + +type telemetryBuilderOptionFunc func(mb *TelemetryBuilder) + +func (tbof telemetryBuilderOptionFunc) apply(mb *TelemetryBuilder) { + tbof(mb) +} + +// NewTelemetryBuilder provides a struct with methods to update all internal telemetry +// for a component +func NewTelemetryBuilder(settings component.TelemetrySettings, options ...TelemetryBuilderOption) (*TelemetryBuilder, error) { + builder := TelemetryBuilder{meters: map[configtelemetry.Level]metric.Meter{}} + for _, op := range options { + op.apply(&builder) + } + builder.meters[configtelemetry.LevelBasic] = LeveledMeter(settings, configtelemetry.LevelBasic) + var err, errs error + builder.ScraperErroredMetricPoints, err = builder.meters[configtelemetry.LevelBasic].Int64Counter( + "otelcol_scraper_errored_metric_points", + metric.WithDescription("Number of metric points that were unable to be scraped."), + metric.WithUnit("{datapoints}"), + ) + errs = errors.Join(errs, err) + 
builder.ScraperScrapedMetricPoints, err = builder.meters[configtelemetry.LevelBasic].Int64Counter( + "otelcol_scraper_scraped_metric_points", + metric.WithDescription("Number of metric points successfully scraped."), + metric.WithUnit("{datapoints}"), + ) + errs = errors.Join(errs, err) + return &builder, errs +} diff --git a/receiver/scraper/scraperhelper/internal/metadata/generated_telemetry_test.go b/receiver/scraper/scraperhelper/internal/metadata/generated_telemetry_test.go new file mode 100644 index 00000000000..7f89917a498 --- /dev/null +++ b/receiver/scraper/scraperhelper/internal/metadata/generated_telemetry_test.go @@ -0,0 +1,83 @@ +// Code generated by mdatagen. DO NOT EDIT. + +package metadata + +import ( + "testing" + + "github.com/stretchr/testify/require" + "go.opentelemetry.io/otel/metric" + embeddedmetric "go.opentelemetry.io/otel/metric/embedded" + noopmetric "go.opentelemetry.io/otel/metric/noop" + "go.opentelemetry.io/otel/trace" + embeddedtrace "go.opentelemetry.io/otel/trace/embedded" + nooptrace "go.opentelemetry.io/otel/trace/noop" + + "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/config/configtelemetry" +) + +type mockMeter struct { + noopmetric.Meter + name string +} +type mockMeterProvider struct { + embeddedmetric.MeterProvider +} + +func (m mockMeterProvider) Meter(name string, opts ...metric.MeterOption) metric.Meter { + return mockMeter{name: name} +} + +type mockTracer struct { + nooptrace.Tracer + name string +} + +type mockTracerProvider struct { + embeddedtrace.TracerProvider +} + +func (m mockTracerProvider) Tracer(name string, opts ...trace.TracerOption) trace.Tracer { + return mockTracer{name: name} +} + +func TestProviders(t *testing.T) { + set := component.TelemetrySettings{ + LeveledMeterProvider: func(_ configtelemetry.Level) metric.MeterProvider { + return mockMeterProvider{} + }, + MeterProvider: mockMeterProvider{}, + TracerProvider: mockTracerProvider{}, + } + + meter := Meter(set) + if m, 
ok := meter.(mockMeter); ok { + require.Equal(t, "go.opentelemetry.io/collector/receiver/scraper/scraperhelper", m.name) + } else { + require.Fail(t, "returned Meter not mockMeter") + } + + tracer := Tracer(set) + if m, ok := tracer.(mockTracer); ok { + require.Equal(t, "go.opentelemetry.io/collector/receiver/scraper/scraperhelper", m.name) + } else { + require.Fail(t, "returned Meter not mockTracer") + } +} + +func TestNewTelemetryBuilder(t *testing.T) { + set := component.TelemetrySettings{ + LeveledMeterProvider: func(_ configtelemetry.Level) metric.MeterProvider { + return mockMeterProvider{} + }, + MeterProvider: mockMeterProvider{}, + TracerProvider: mockTracerProvider{}, + } + applied := false + _, err := NewTelemetryBuilder(set, telemetryBuilderOptionFunc(func(b *TelemetryBuilder) { + applied = true + })) + require.NoError(t, err) + require.True(t, applied) +} diff --git a/receiver/scraper/scraperhelper/metadata.yaml b/receiver/scraper/scraperhelper/metadata.yaml new file mode 100644 index 00000000000..011002be539 --- /dev/null +++ b/receiver/scraper/scraperhelper/metadata.yaml @@ -0,0 +1,27 @@ +type: scraperhelper +github_project: open-telemetry/opentelemetry-collector + +status: + class: receiver + not_component: true + stability: + beta: [traces, metrics, logs] + distributions: [core, contrib] + +telemetry: + metrics: + scraper_scraped_metric_points: + enabled: true + description: Number of metric points successfully scraped. + unit: "{datapoints}" + sum: + value_type: int + monotonic: true + + scraper_errored_metric_points: + enabled: true + description: Number of metric points that were unable to be scraped. 
+ unit: "{datapoints}" + sum: + value_type: int + monotonic: true \ No newline at end of file diff --git a/receiver/scraper/scraperhelper/obsreport.go b/receiver/scraper/scraperhelper/obsreport.go new file mode 100644 index 00000000000..7aa12951f61 --- /dev/null +++ b/receiver/scraper/scraperhelper/obsreport.go @@ -0,0 +1,108 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package scraperhelper // import "go.opentelemetry.io/collector/receiver/scraper/scraperhelper" + +import ( + "context" + "errors" + + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/codes" + "go.opentelemetry.io/otel/metric" + "go.opentelemetry.io/otel/trace" + + "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/pipeline" + "go.opentelemetry.io/collector/receiver" + "go.opentelemetry.io/collector/receiver/internal" + "go.opentelemetry.io/collector/receiver/scraper/scraperhelper/internal/metadata" + "go.opentelemetry.io/collector/receiver/scrapererror" +) + +// obsReport is a helper to add observability to a scraper. +type obsReport struct { + receiverID component.ID + scraper component.ID + tracer trace.Tracer + + otelAttrs []attribute.KeyValue + telemetryBuilder *metadata.TelemetryBuilder +} + +// obsReportSettings are settings for creating an ObsReport. 
+type obsReportSettings struct { + ReceiverID component.ID + Scraper component.ID + ReceiverCreateSettings receiver.Settings +} + +func newScraper(cfg obsReportSettings) (*obsReport, error) { + telemetryBuilder, err := metadata.NewTelemetryBuilder(cfg.ReceiverCreateSettings.TelemetrySettings) + if err != nil { + return nil, err + } + return &obsReport{ + receiverID: cfg.ReceiverID, + scraper: cfg.Scraper, + tracer: cfg.ReceiverCreateSettings.TracerProvider.Tracer(cfg.Scraper.String()), + + otelAttrs: []attribute.KeyValue{ + attribute.String(internal.ReceiverKey, cfg.ReceiverID.String()), + attribute.String(internal.ScraperKey, cfg.Scraper.String()), + }, + telemetryBuilder: telemetryBuilder, + }, nil +} + +// StartMetricsOp is called when a scrape operation is started. The +// returned context should be used in other calls to the obsreport functions +// dealing with the same scrape operation. +func (s *obsReport) StartMetricsOp(ctx context.Context) context.Context { + spanName := internal.ScraperPrefix + s.receiverID.String() + internal.SpanNameSep + s.scraper.String() + internal.ScraperMetricsOperationSuffix + ctx, _ = s.tracer.Start(ctx, spanName) + return ctx +} + +// EndMetricsOp completes the scrape operation that was started with +// StartMetricsOp. 
+func (s *obsReport) EndMetricsOp( + scraperCtx context.Context, + numScrapedMetrics int, + err error, +) { + numErroredMetrics := 0 + if err != nil { + var partialErr scrapererror.PartialScrapeError + if errors.As(err, &partialErr) { + numErroredMetrics = partialErr.Failed + } else { + numErroredMetrics = numScrapedMetrics + numScrapedMetrics = 0 + } + } + + span := trace.SpanFromContext(scraperCtx) + + s.recordMetrics(scraperCtx, numScrapedMetrics, numErroredMetrics) + + // end span according to errors + if span.IsRecording() { + span.SetAttributes( + attribute.String(internal.FormatKey, pipeline.SignalMetrics.String()), + attribute.Int64(internal.ScrapedMetricPointsKey, int64(numScrapedMetrics)), + attribute.Int64(internal.ErroredMetricPointsKey, int64(numErroredMetrics)), + ) + + if err != nil { + span.SetStatus(codes.Error, err.Error()) + } + } + + span.End() +} + +func (s *obsReport) recordMetrics(scraperCtx context.Context, numScrapedMetrics, numErroredMetrics int) { + s.telemetryBuilder.ScraperScrapedMetricPoints.Add(scraperCtx, int64(numScrapedMetrics), metric.WithAttributes(s.otelAttrs...)) + s.telemetryBuilder.ScraperErroredMetricPoints.Add(scraperCtx, int64(numErroredMetrics), metric.WithAttributes(s.otelAttrs...)) +} diff --git a/receiver/scraper/scraperhelper/obsreport_test.go b/receiver/scraper/scraperhelper/obsreport_test.go new file mode 100644 index 00000000000..2240ce2eff6 --- /dev/null +++ b/receiver/scraper/scraperhelper/obsreport_test.go @@ -0,0 +1,120 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package scraperhelper + +import ( + "context" + "errors" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/codes" + + "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/component/componenttest" + "go.opentelemetry.io/collector/receiver" + 
"go.opentelemetry.io/collector/receiver/internal" + "go.opentelemetry.io/collector/receiver/scrapererror" +) + +var ( + receiverID = component.MustNewID("fakeReceiver") + scraperID = component.MustNewID("fakeScraper") + + errFake = errors.New("errFake") + partialErrFake = scrapererror.NewPartialScrapeError(errFake, 1) +) + +type testParams struct { + items int + err error +} + +func TestScrapeMetricsDataOp(t *testing.T) { + testTelemetry(t, receiverID, func(t *testing.T, tt componenttest.TestTelemetry) { + parentCtx, parentSpan := tt.TelemetrySettings().TracerProvider.Tracer("test").Start(context.Background(), t.Name()) + defer parentSpan.End() + + params := []testParams{ + {items: 23, err: partialErrFake}, + {items: 29, err: errFake}, + {items: 15, err: nil}, + } + for i := range params { + scrp, err := newScraper(obsReportSettings{ + ReceiverID: receiverID, + Scraper: scraperID, + ReceiverCreateSettings: receiver.Settings{ID: receiverID, TelemetrySettings: tt.TelemetrySettings(), BuildInfo: component.NewDefaultBuildInfo()}, + }) + require.NoError(t, err) + ctx := scrp.StartMetricsOp(parentCtx) + assert.NotNil(t, ctx) + scrp.EndMetricsOp(ctx, params[i].items, params[i].err) + } + + spans := tt.SpanRecorder.Ended() + require.Equal(t, len(params), len(spans)) + + var scrapedMetricPoints, erroredMetricPoints int + for i, span := range spans { + assert.Equal(t, "scraper/"+receiverID.String()+"/"+scraperID.String()+"/MetricsScraped", span.Name()) + switch { + case params[i].err == nil: + scrapedMetricPoints += params[i].items + require.Contains(t, span.Attributes(), attribute.KeyValue{Key: internal.ScrapedMetricPointsKey, Value: attribute.Int64Value(int64(params[i].items))}) + require.Contains(t, span.Attributes(), attribute.KeyValue{Key: internal.ErroredMetricPointsKey, Value: attribute.Int64Value(0)}) + assert.Equal(t, codes.Unset, span.Status().Code) + case errors.Is(params[i].err, errFake): + erroredMetricPoints += params[i].items + require.Contains(t, 
span.Attributes(), attribute.KeyValue{Key: internal.ScrapedMetricPointsKey, Value: attribute.Int64Value(0)}) + require.Contains(t, span.Attributes(), attribute.KeyValue{Key: internal.ErroredMetricPointsKey, Value: attribute.Int64Value(int64(params[i].items))}) + assert.Equal(t, codes.Error, span.Status().Code) + assert.Equal(t, params[i].err.Error(), span.Status().Description) + + case errors.Is(params[i].err, partialErrFake): + scrapedMetricPoints += params[i].items + erroredMetricPoints++ + require.Contains(t, span.Attributes(), attribute.KeyValue{Key: internal.ScrapedMetricPointsKey, Value: attribute.Int64Value(int64(params[i].items))}) + require.Contains(t, span.Attributes(), attribute.KeyValue{Key: internal.ErroredMetricPointsKey, Value: attribute.Int64Value(1)}) + assert.Equal(t, codes.Error, span.Status().Code) + assert.Equal(t, params[i].err.Error(), span.Status().Description) + default: + t.Fatalf("unexpected err param: %v", params[i].err) + } + } + + require.NoError(t, tt.CheckScraperMetrics(receiverID, scraperID, int64(scrapedMetricPoints), int64(erroredMetricPoints))) + }) +} + +func TestCheckScraperMetricsViews(t *testing.T) { + tt, err := componenttest.SetupTelemetry(receiverID) + require.NoError(t, err) + t.Cleanup(func() { require.NoError(t, tt.Shutdown(context.Background())) }) + + s, err := newScraper(obsReportSettings{ + ReceiverID: receiverID, + Scraper: scraperID, + ReceiverCreateSettings: receiver.Settings{ID: receiverID, TelemetrySettings: tt.TelemetrySettings(), BuildInfo: component.NewDefaultBuildInfo()}, + }) + require.NoError(t, err) + ctx := s.StartMetricsOp(context.Background()) + require.NotNil(t, ctx) + s.EndMetricsOp(ctx, 7, nil) + + require.NoError(t, tt.CheckScraperMetrics(receiverID, scraperID, 7, 0)) + require.Error(t, tt.CheckScraperMetrics(receiverID, scraperID, 7, 7)) + require.Error(t, tt.CheckScraperMetrics(receiverID, scraperID, 0, 0)) + assert.Error(t, tt.CheckScraperMetrics(receiverID, scraperID, 0, 7)) +} + +func 
testTelemetry(t *testing.T, id component.ID, testFunc func(t *testing.T, tt componenttest.TestTelemetry)) { + tt, err := componenttest.SetupTelemetry(id) + require.NoError(t, err) + t.Cleanup(func() { require.NoError(t, tt.Shutdown(context.Background())) }) + + testFunc(t, tt) +} diff --git a/receiver/scraper/scraperhelper/scraper.go b/receiver/scraper/scraperhelper/scraper.go new file mode 100644 index 00000000000..eb94ede2d1e --- /dev/null +++ b/receiver/scraper/scraperhelper/scraper.go @@ -0,0 +1,85 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package scraperhelper // import "go.opentelemetry.io/collector/receiver/scraper/scraperhelper" + +import ( + "context" + "errors" + + "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/pdata/pmetric" +) + +var errNilFunc = errors.New("nil scrape func") + +// ScrapeFunc scrapes metrics. +type ScrapeFunc func(context.Context) (pmetric.Metrics, error) + +func (sf ScrapeFunc) Scrape(ctx context.Context) (pmetric.Metrics, error) { + return sf(ctx) +} + +// Scraper is the base interface for scrapers. +type Scraper interface { + component.Component + + // ID returns the scraper id. + ID() component.ID + Scrape(context.Context) (pmetric.Metrics, error) +} + +// ScraperOption apply changes to internal options. +type ScraperOption interface { + apply(*baseScraper) +} + +type scraperOptionFunc func(*baseScraper) + +func (of scraperOptionFunc) apply(e *baseScraper) { + of(e) +} + +// WithStart sets the function that will be called on startup. +func WithStart(start component.StartFunc) ScraperOption { + return scraperOptionFunc(func(o *baseScraper) { + o.StartFunc = start + }) +} + +// WithShutdown sets the function that will be called on shutdown. 
+func WithShutdown(shutdown component.ShutdownFunc) ScraperOption { + return scraperOptionFunc(func(o *baseScraper) { + o.ShutdownFunc = shutdown + }) +} + +var _ Scraper = (*baseScraper)(nil) + +type baseScraper struct { + component.StartFunc + component.ShutdownFunc + ScrapeFunc + id component.ID +} + +func (b *baseScraper) ID() component.ID { + return b.id +} + +// NewScraper creates a Scraper that calls Scrape at the specified collection interval, +// reports observability information, and passes the scraped metrics to the next consumer. +func NewScraper(t component.Type, scrape ScrapeFunc, options ...ScraperOption) (Scraper, error) { + if scrape == nil { + return nil, errNilFunc + } + bs := &baseScraper{ + ScrapeFunc: scrape, + id: component.NewID(t), + } + for _, op := range options { + op.apply(bs) + } + + return bs, nil +} diff --git a/receiver/scraper/scraperhelper/scrapercontroller.go b/receiver/scraper/scraperhelper/scrapercontroller.go new file mode 100644 index 00000000000..f08a3909f93 --- /dev/null +++ b/receiver/scraper/scraperhelper/scrapercontroller.go @@ -0,0 +1,234 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package scraperhelper // import "go.opentelemetry.io/collector/receiver/scraper/scraperhelper" + +import ( + "context" + "errors" + "time" + + "go.uber.org/multierr" + "go.uber.org/zap" + + "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/consumer" + "go.opentelemetry.io/collector/pdata/pmetric" + "go.opentelemetry.io/collector/receiver" + "go.opentelemetry.io/collector/receiver/receiverhelper" + "go.opentelemetry.io/collector/receiver/scrapererror" +) + +// ScraperControllerOption apply changes to internal options. 
+type ScraperControllerOption interface { + apply(*controller) +} + +type scraperControllerOptionFunc func(*controller) + +func (of scraperControllerOptionFunc) apply(e *controller) { + of(e) +} + +// AddScraper configures the provided scrape function to be called +// with the specified options, and at the specified collection interval. +// +// Observability information will be reported, and the scraped metrics +// will be passed to the next consumer. +func AddScraper(scraper Scraper) ScraperControllerOption { + return scraperControllerOptionFunc(func(o *controller) { + o.scrapers = append(o.scrapers, scraper) + }) +} + +// WithTickerChannel allows you to override the scraper controller's ticker +// channel to specify when scrape is called. This is only expected to be +// used by tests. +func WithTickerChannel(tickerCh <-chan time.Time) ScraperControllerOption { + return scraperControllerOptionFunc(func(o *controller) { + o.tickerCh = tickerCh + }) +} + +type controller struct { + id component.ID + logger *zap.Logger + collectionInterval time.Duration + initialDelay time.Duration + timeout time.Duration + nextConsumer consumer.Metrics + + scrapers []Scraper + obsScrapers []*obsReport + + tickerCh <-chan time.Time + + initialized bool + done chan struct{} + terminated chan struct{} + + obsrecv *receiverhelper.ObsReport + recvSettings receiver.Settings +} + +// NewScraperControllerReceiver creates a Receiver with the configured options, that can control multiple scrapers. 
+func NewScraperControllerReceiver( + cfg *ControllerConfig, + set receiver.Settings, + nextConsumer consumer.Metrics, + options ...ScraperControllerOption, +) (component.Component, error) { + + if cfg.CollectionInterval <= 0 { + return nil, errors.New("collection_interval must be a positive duration") + } + + obsrecv, err := receiverhelper.NewObsReport(receiverhelper.ObsReportSettings{ + ReceiverID: set.ID, + Transport: "", + ReceiverCreateSettings: set, + }) + if err != nil { + return nil, err + } + + sc := &controller{ + id: set.ID, + logger: set.Logger, + collectionInterval: cfg.CollectionInterval, + initialDelay: cfg.InitialDelay, + timeout: cfg.Timeout, + nextConsumer: nextConsumer, + done: make(chan struct{}), + terminated: make(chan struct{}), + obsrecv: obsrecv, + recvSettings: set, + } + + for _, op := range options { + op.apply(sc) + } + + sc.obsScrapers = make([]*obsReport, len(sc.scrapers)) + for i, scraper := range sc.scrapers { + scrp, err := newScraper(obsReportSettings{ + ReceiverID: sc.id, + Scraper: scraper.ID(), + ReceiverCreateSettings: sc.recvSettings, + }) + + sc.obsScrapers[i] = scrp + + if err != nil { + return nil, err + } + } + + return sc, nil +} + +// Start the receiver, invoked during service start. +func (sc *controller) Start(ctx context.Context, host component.Host) error { + for _, scraper := range sc.scrapers { + if err := scraper.Start(ctx, host); err != nil { + return err + } + } + + sc.initialized = true + sc.startScraping() + return nil +} + +// Shutdown the receiver, invoked during service shutdown. +func (sc *controller) Shutdown(ctx context.Context) error { + sc.stopScraping() + + // wait until scraping ticker has terminated + if sc.initialized { + <-sc.terminated + } + + var errs error + for _, scraper := range sc.scrapers { + errs = multierr.Append(errs, scraper.Shutdown(ctx)) + } + + return errs +} + +// startScraping initiates a ticker that calls Scrape based on the configured +// collection interval. 
+func (sc *controller) startScraping() { + go func() { + if sc.initialDelay > 0 { + <-time.After(sc.initialDelay) + } + + if sc.tickerCh == nil { + ticker := time.NewTicker(sc.collectionInterval) + defer ticker.Stop() + + sc.tickerCh = ticker.C + } + // Call scrape method on initialization to ensure + // that scrapers start from when the component starts + // instead of waiting for the full duration to start. + sc.scrapeMetricsAndReport() + for { + select { + case <-sc.tickerCh: + sc.scrapeMetricsAndReport() + case <-sc.done: + sc.terminated <- struct{}{} + return + } + } + }() +} + +// scrapeMetricsAndReport calls the Scrape function for each of the configured +// Scrapers, records observability information, and passes the scraped metrics +// to the next component. +func (sc *controller) scrapeMetricsAndReport() { + ctx, done := withScrapeContext(sc.timeout) + defer done() + + metrics := pmetric.NewMetrics() + + for i, scraper := range sc.scrapers { + scrp := sc.obsScrapers[i] + ctx = scrp.StartMetricsOp(ctx) + md, err := scraper.Scrape(ctx) + + if err != nil { + sc.logger.Error("Error scraping metrics", zap.Error(err), zap.Stringer("scraper", scraper.ID())) + if !scrapererror.IsPartialScrapeError(err) { + scrp.EndMetricsOp(ctx, 0, err) + continue + } + } + scrp.EndMetricsOp(ctx, md.MetricCount(), err) + md.ResourceMetrics().MoveAndAppendTo(metrics.ResourceMetrics()) + } + + dataPointCount := metrics.DataPointCount() + ctx = sc.obsrecv.StartMetricsOp(ctx) + err := sc.nextConsumer.ConsumeMetrics(ctx, metrics) + sc.obsrecv.EndMetricsOp(ctx, "", dataPointCount, err) +} + +// stopScraping stops the ticker +func (sc *controller) stopScraping() { + close(sc.done) +} + +// withScrapeContext will return a context that has no deadline if timeout is 0 +// which implies no explicit timeout had occurred, otherwise, a context +// with a deadline of the provided timeout is returned. 
+func withScrapeContext(timeout time.Duration) (context.Context, context.CancelFunc) { + if timeout == 0 { + return context.WithCancel(context.Background()) + } + return context.WithTimeout(context.Background(), timeout) +} diff --git a/receiver/scraper/scraperhelper/scrapercontroller_test.go b/receiver/scraper/scraperhelper/scrapercontroller_test.go new file mode 100644 index 00000000000..7d9fc8101e2 --- /dev/null +++ b/receiver/scraper/scraperhelper/scrapercontroller_test.go @@ -0,0 +1,427 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package scraperhelper + +import ( + "context" + "errors" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.opentelemetry.io/otel/codes" + sdktrace "go.opentelemetry.io/otel/sdk/trace" + "go.uber.org/multierr" + + "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/component/componenttest" + "go.opentelemetry.io/collector/consumer/consumertest" + "go.opentelemetry.io/collector/pdata/pmetric" + "go.opentelemetry.io/collector/receiver" + "go.opentelemetry.io/collector/receiver/receivertest" + "go.opentelemetry.io/collector/receiver/scrapererror" +) + +type testInitialize struct { + ch chan bool + err error +} + +func (ts *testInitialize) start(context.Context, component.Host) error { + ts.ch <- true + return ts.err +} + +type testClose struct { + ch chan bool + err error +} + +func (ts *testClose) shutdown(context.Context) error { + ts.ch <- true + return ts.err +} + +type testScrapeMetrics struct { + ch chan int + timesScrapeCalled int + err error +} + +func (ts *testScrapeMetrics) scrape(context.Context) (pmetric.Metrics, error) { + ts.timesScrapeCalled++ + ts.ch <- ts.timesScrapeCalled + + if ts.err != nil { + return pmetric.Metrics{}, ts.err + } + + md := pmetric.NewMetrics() + md.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics().AppendEmpty().SetEmptyGauge().DataPoints().AppendEmpty() + 
return md, nil +} + +func newTestNoDelaySettings() *ControllerConfig { + return &ControllerConfig{ + CollectionInterval: time.Second, + InitialDelay: 0, + } +} + +type metricsTestCase struct { + name string + + scrapers int + scraperControllerSettings *ControllerConfig + scrapeErr error + expectedNewErr string + expectScraped bool + + initialize bool + close bool + initializeErr error + closeErr error +} + +func TestScrapeController(t *testing.T) { + testCases := []metricsTestCase{ + { + name: "NoScrapers", + }, + { + name: "AddMetricsScrapersWithCollectionInterval", + scrapers: 2, + expectScraped: true, + }, + { + name: "AddMetricsScrapersWithCollectionInterval_InvalidCollectionIntervalError", + scrapers: 2, + scraperControllerSettings: &ControllerConfig{CollectionInterval: -time.Millisecond}, + expectedNewErr: "collection_interval must be a positive duration", + }, + { + name: "AddMetricsScrapers_ScrapeError", + scrapers: 2, + scrapeErr: errors.New("err1"), + }, + { + name: "AddMetricsScrapersWithInitializeAndClose", + scrapers: 2, + initialize: true, + expectScraped: true, + close: true, + }, + { + name: "AddMetricsScrapersWithInitializeAndCloseErrors", + scrapers: 2, + initialize: true, + close: true, + initializeErr: errors.New("err1"), + closeErr: errors.New("err2"), + }, + } + + for _, tt := range testCases { + test := tt + t.Run(test.name, func(t *testing.T) { + receiverID := component.MustNewID("receiver") + tt, err := componenttest.SetupTelemetry(receiverID) + require.NoError(t, err) + t.Cleanup(func() { require.NoError(t, tt.Shutdown(context.Background())) }) + + initializeChs := make([]chan bool, test.scrapers) + scrapeMetricsChs := make([]chan int, test.scrapers) + closeChs := make([]chan bool, test.scrapers) + options := configureMetricOptions(t, test, initializeChs, scrapeMetricsChs, closeChs) + + tickerCh := make(chan time.Time) + options = append(options, WithTickerChannel(tickerCh)) + + sink := new(consumertest.MetricsSink) + cfg := 
newTestNoDelaySettings() + if test.scraperControllerSettings != nil { + cfg = test.scraperControllerSettings + } + + mr, err := NewScraperControllerReceiver(cfg, receiver.Settings{ID: receiverID, TelemetrySettings: tt.TelemetrySettings(), BuildInfo: component.NewDefaultBuildInfo()}, sink, options...) + if test.expectedNewErr != "" { + assert.EqualError(t, err, test.expectedNewErr) + return + } + require.NoError(t, err) + + err = mr.Start(context.Background(), componenttest.NewNopHost()) + expectedStartErr := getExpectedStartErr(test) + if expectedStartErr != nil { + assert.Equal(t, expectedStartErr, err) + } else if test.initialize { + assertChannelsCalled(t, initializeChs, "start was not called") + } + + const iterations = 5 + + if test.expectScraped || test.scrapeErr != nil { + // validate that scrape is called at least N times for each configured scraper + for _, ch := range scrapeMetricsChs { + <-ch + } + // Consume the initial scrapes on start + for i := 0; i < iterations; i++ { + tickerCh <- time.Now() + + for _, ch := range scrapeMetricsChs { + <-ch + } + } + + // wait until all calls to scrape have completed + if test.scrapeErr == nil { + require.Eventually(t, func() bool { + return sink.DataPointCount() == (1+iterations)*(test.scrapers) + }, time.Second, time.Millisecond) + } + + if test.expectScraped { + assert.GreaterOrEqual(t, sink.DataPointCount(), iterations) + } + + spans := tt.SpanRecorder.Ended() + assertReceiverSpan(t, spans) + assertReceiverViews(t, tt, sink) + assertScraperSpan(t, test.scrapeErr, spans) + assertScraperViews(t, tt, test.scrapeErr, sink) + } + + err = mr.Shutdown(context.Background()) + expectedShutdownErr := getExpectedShutdownErr(test) + if expectedShutdownErr != nil { + assert.EqualError(t, err, expectedShutdownErr.Error()) + } else if test.close { + assertChannelsCalled(t, closeChs, "shutdown was not called") + } + }) + } +} + +func configureMetricOptions(t *testing.T, test metricsTestCase, initializeChs []chan bool, 
scrapeMetricsChs []chan int, closeChs []chan bool) []ScraperControllerOption { + var metricOptions []ScraperControllerOption + + for i := 0; i < test.scrapers; i++ { + var scraperOptions []ScraperOption + if test.initialize { + initializeChs[i] = make(chan bool, 1) + ti := &testInitialize{ch: initializeChs[i], err: test.initializeErr} + scraperOptions = append(scraperOptions, WithStart(ti.start)) + } + if test.close { + closeChs[i] = make(chan bool, 1) + tc := &testClose{ch: closeChs[i], err: test.closeErr} + scraperOptions = append(scraperOptions, WithShutdown(tc.shutdown)) + } + + scrapeMetricsChs[i] = make(chan int) + tsm := &testScrapeMetrics{ch: scrapeMetricsChs[i], err: test.scrapeErr} + scp, err := NewScraper(component.MustNewType("scraper"), tsm.scrape, scraperOptions...) + require.NoError(t, err) + + metricOptions = append(metricOptions, AddScraper(scp)) + } + + return metricOptions +} + +func getExpectedStartErr(test metricsTestCase) error { + return test.initializeErr +} + +func getExpectedShutdownErr(test metricsTestCase) error { + var errs error + + if test.closeErr != nil { + for i := 0; i < test.scrapers; i++ { + errs = multierr.Append(errs, test.closeErr) + } + } + + return errs +} + +func assertChannelsCalled(t *testing.T, chs []chan bool, message string) { + for _, ic := range chs { + assertChannelCalled(t, ic, message) + } +} + +func assertChannelCalled(t *testing.T, ch chan bool, message string) { + select { + case <-ch: + default: + assert.Fail(t, message) + } +} + +func assertReceiverSpan(t *testing.T, spans []sdktrace.ReadOnlySpan) { + receiverSpan := false + for _, span := range spans { + if span.Name() == "receiver/receiver/MetricsReceived" { + receiverSpan = true + break + } + } + assert.True(t, receiverSpan) +} + +func assertReceiverViews(t *testing.T, tt componenttest.TestTelemetry, sink *consumertest.MetricsSink) { + dataPointCount := 0 + for _, md := range sink.AllMetrics() { + dataPointCount += md.DataPointCount() + } + 
require.NoError(t, tt.CheckReceiverMetrics("", int64(dataPointCount), 0))
+}
+
+func assertScraperSpan(t *testing.T, expectedErr error, spans []sdktrace.ReadOnlySpan) {
+	expectedStatusCode := codes.Unset
+	expectedStatusMessage := ""
+	if expectedErr != nil {
+		expectedStatusCode = codes.Error
+		expectedStatusMessage = expectedErr.Error()
+	}
+
+	scraperSpan := false
+	for _, span := range spans {
+		if span.Name() == "scraper/receiver/scraper/MetricsScraped" {
+			scraperSpan = true
+			assert.Equal(t, expectedStatusCode, span.Status().Code)
+			assert.Equal(t, expectedStatusMessage, span.Status().Description)
+			break
+		}
+	}
+	assert.True(t, scraperSpan)
+}
+
+func assertScraperViews(t *testing.T, tt componenttest.TestTelemetry, expectedErr error, sink *consumertest.MetricsSink) {
+	expectedScraped := int64(sink.DataPointCount())
+	expectedErrored := int64(0)
+	if expectedErr != nil {
+		var partialError scrapererror.PartialScrapeError
+		if errors.As(expectedErr, &partialError) {
+			expectedErrored = int64(partialError.Failed)
+		} else {
+			expectedScraped = int64(0)
+			expectedErrored = int64(sink.DataPointCount())
+		}
+	}
+
+	require.NoError(t, tt.CheckScraperMetrics(component.MustNewID("receiver"), component.MustNewID("scraper"), expectedScraped, expectedErrored))
+}
+
+func TestSingleScrapePerInterval(t *testing.T) {
+	scrapeMetricsCh := make(chan int, 10)
+	tsm := &testScrapeMetrics{ch: scrapeMetricsCh}
+
+	cfg := newTestNoDelaySettings()
+
+	tickerCh := make(chan time.Time)
+
+	scp, err := NewScraper(component.MustNewType("scraper"), tsm.scrape)
+	require.NoError(t, err)
+
+	receiver, err := NewScraperControllerReceiver(
+		cfg,
+		receivertest.NewNopSettings(),
+		new(consumertest.MetricsSink),
+		AddScraper(scp),
+		WithTickerChannel(tickerCh),
+	)
+	require.NoError(t, err)
+
+	require.NoError(t, receiver.Start(context.Background(), componenttest.NewNopHost()))
+	defer func() { require.NoError(t, receiver.Shutdown(context.Background())) }()
+
+	tickerCh <- time.Now()
+
+	
assert.Eventually( + t, + func() bool { + return <-scrapeMetricsCh == 2 + }, + 300*time.Millisecond, + 100*time.Millisecond, + "Make sure the scraper channel is called twice", + ) + + select { + case <-scrapeMetricsCh: + assert.Fail(t, "Scrape was called more than twice") + case <-time.After(100 * time.Millisecond): + return + } +} + +func TestScrapeControllerStartsOnInit(t *testing.T) { + t.Parallel() + + tsm := &testScrapeMetrics{ + ch: make(chan int, 1), + } + + scp, err := NewScraper(component.MustNewType("scraper"), tsm.scrape) + require.NoError(t, err, "Must not error when creating scraper") + + r, err := NewScraperControllerReceiver( + &ControllerConfig{ + CollectionInterval: time.Hour, + InitialDelay: 0, + }, + receivertest.NewNopSettings(), + new(consumertest.MetricsSink), + AddScraper(scp), + ) + require.NoError(t, err, "Must not error when creating scrape controller") + + assert.NoError(t, r.Start(context.Background(), componenttest.NewNopHost()), "Must not error on start") + <-time.After(500 * time.Nanosecond) + require.NoError(t, r.Shutdown(context.Background()), "Must not have errored on shutdown") + assert.Equal(t, 1, tsm.timesScrapeCalled, "Must have been called as soon as the controller started") +} + +func TestScrapeControllerInitialDelay(t *testing.T) { + if testing.Short() { + t.Skip("This requires real time to pass, skipping") + return + } + + t.Parallel() + + var ( + elapsed = make(chan time.Time, 1) + cfg = ControllerConfig{ + CollectionInterval: time.Second, + InitialDelay: 300 * time.Millisecond, + } + ) + + scp, err := NewScraper(component.MustNewType("timed"), func(context.Context) (pmetric.Metrics, error) { + elapsed <- time.Now() + return pmetric.NewMetrics(), nil + }) + require.NoError(t, err, "Must not error when creating scraper") + + r, err := NewScraperControllerReceiver( + &cfg, + receivertest.NewNopSettings(), + new(consumertest.MetricsSink), + AddScraper(scp), + ) + require.NoError(t, err, "Must not error when creating 
receiver") + + t0 := time.Now() + require.NoError(t, r.Start(context.Background(), componenttest.NewNopHost()), "Must not error when starting") + t1 := <-elapsed + + assert.GreaterOrEqual(t, t1.Sub(t0), 300*time.Millisecond, "Must have had 300ms pass as defined by initial delay") + + assert.NoError(t, r.Shutdown(context.Background()), "Must not error closing down") +} diff --git a/receiver/scrapererror/doc.go b/receiver/scrapererror/doc.go index c893f0f8e89..cbf5ffff49e 100644 --- a/receiver/scrapererror/doc.go +++ b/receiver/scrapererror/doc.go @@ -1,5 +1,5 @@ // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 - // Package scrapererror provides custom error types for scrapers. +// Deprecated: [v0.111.0] Use /scraper/scrapererror instead. package scrapererror // import "go.opentelemetry.io/collector/receiver/scrapererror" diff --git a/receiver/scrapererror/package_test.go b/receiver/scrapererror/package_test.go index b414afd1d34..c46c845b744 100644 --- a/receiver/scrapererror/package_test.go +++ b/receiver/scrapererror/package_test.go @@ -1,6 +1,6 @@ // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 - +// Deprecated: [v0.111.0] Use /scraper/scrapererror instead. package scrapererror import ( diff --git a/receiver/scrapererror/partialscrapeerror.go b/receiver/scrapererror/partialscrapeerror.go index cb4c03bab80..fae43c5fa04 100644 --- a/receiver/scrapererror/partialscrapeerror.go +++ b/receiver/scrapererror/partialscrapeerror.go @@ -1,6 +1,6 @@ // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 - +// Deprecated: [v0.111.0] Use /scraper/scrapererror instead. 
package scrapererror // import "go.opentelemetry.io/collector/receiver/scrapererror" import "errors" diff --git a/receiver/scrapererror/partialscrapeerror_test.go b/receiver/scrapererror/partialscrapeerror_test.go index d744a64abce..923d33937bc 100644 --- a/receiver/scrapererror/partialscrapeerror_test.go +++ b/receiver/scrapererror/partialscrapeerror_test.go @@ -1,6 +1,6 @@ // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 - +// Deprecated: [v0.111.0] Use /scraper/scrapererror instead. package scrapererror import ( diff --git a/receiver/scrapererror/scrapeerror.go b/receiver/scrapererror/scrapeerror.go index 9c1b4b08a20..42936749592 100644 --- a/receiver/scrapererror/scrapeerror.go +++ b/receiver/scrapererror/scrapeerror.go @@ -1,6 +1,6 @@ // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 - +// Deprecated: [v0.111.0] Use /scraper/scrapererror instead. package scrapererror // import "go.opentelemetry.io/collector/receiver/scrapererror" import ( diff --git a/receiver/scrapererror/scrapeerror_test.go b/receiver/scrapererror/scrapeerror_test.go index 7a47345013a..d2f75498681 100644 --- a/receiver/scrapererror/scrapeerror_test.go +++ b/receiver/scrapererror/scrapeerror_test.go @@ -1,6 +1,6 @@ // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 - +// Deprecated: [v0.111.0] Use /scraper/scrapererror instead. package scrapererror import ( diff --git a/receiver/scraperhelper/config.go b/receiver/scraperhelper/config.go index 84b7cd2073e..e6d6e7c8a94 100644 --- a/receiver/scraperhelper/config.go +++ b/receiver/scraperhelper/config.go @@ -1,6 +1,6 @@ // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 - +// Deprecated: [v0.111.0] Use /scraper/scraperhelper instead. 
package scraperhelper // import "go.opentelemetry.io/collector/receiver/scraperhelper" import ( diff --git a/receiver/scraperhelper/config_test.go b/receiver/scraperhelper/config_test.go index 1da08e78006..cfefdd04489 100644 --- a/receiver/scraperhelper/config_test.go +++ b/receiver/scraperhelper/config_test.go @@ -1,6 +1,6 @@ // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 - +// Deprecated: [v0.111.0] Use /scraper/scraperhelper instead. package scraperhelper import ( diff --git a/receiver/scraperhelper/doc.go b/receiver/scraperhelper/doc.go index a1e5cb26f1d..18206adf2fa 100644 --- a/receiver/scraperhelper/doc.go +++ b/receiver/scraperhelper/doc.go @@ -1,6 +1,6 @@ // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 - +// Deprecated: [v0.111.0] Use /scraper/scraperhelper instead. //go:generate mdatagen metadata.yaml // Package scraperhelper provides utilities for scrapers. diff --git a/receiver/scraperhelper/obsreport.go b/receiver/scraperhelper/obsreport.go index cce7f49478e..7af76476573 100644 --- a/receiver/scraperhelper/obsreport.go +++ b/receiver/scraperhelper/obsreport.go @@ -1,6 +1,6 @@ // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 - +// Deprecated: [v0.111.0] Use /scraper/scraperhelper instead. package scraperhelper // import "go.opentelemetry.io/collector/receiver/scraperhelper" import ( diff --git a/receiver/scraperhelper/obsreport_test.go b/receiver/scraperhelper/obsreport_test.go index 2240ce2eff6..fe0af14e3d4 100644 --- a/receiver/scraperhelper/obsreport_test.go +++ b/receiver/scraperhelper/obsreport_test.go @@ -1,6 +1,6 @@ // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 - +// Deprecated: [v0.111.0] Use /scraper/scraperhelper instead. 
package scraperhelper import ( diff --git a/receiver/scraperhelper/scraper.go b/receiver/scraperhelper/scraper.go index aa335af87f6..e7ff174aeae 100644 --- a/receiver/scraperhelper/scraper.go +++ b/receiver/scraperhelper/scraper.go @@ -1,6 +1,6 @@ // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 - +// Deprecated: [v0.111.0] Use /scraper/scraperhelper instead. package scraperhelper // import "go.opentelemetry.io/collector/receiver/scraperhelper" import ( diff --git a/receiver/scraperhelper/scrapercontroller.go b/receiver/scraperhelper/scrapercontroller.go index 8767c608407..fb3997bebda 100644 --- a/receiver/scraperhelper/scrapercontroller.go +++ b/receiver/scraperhelper/scrapercontroller.go @@ -1,6 +1,6 @@ // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 - +// Deprecated: [v0.111.0] Use /scraper/scraperhelper instead. package scraperhelper // import "go.opentelemetry.io/collector/receiver/scraperhelper" import ( diff --git a/receiver/scraperhelper/scrapercontroller_test.go b/receiver/scraperhelper/scrapercontroller_test.go index 7d9fc8101e2..7ba565bce68 100644 --- a/receiver/scraperhelper/scrapercontroller_test.go +++ b/receiver/scraperhelper/scrapercontroller_test.go @@ -1,6 +1,6 @@ // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 - +// Deprecated: [v0.111.0] Use /scraper/scraperhelper instead. package scraperhelper import ( diff --git a/versions.yaml b/versions.yaml index bb9e7900eda..9f0275e3f42 100644 --- a/versions.yaml +++ b/versions.yaml @@ -65,6 +65,7 @@ module-sets: - go.opentelemetry.io/collector/processor/memorylimiterprocessor - go.opentelemetry.io/collector/processor/processorprofiles - go.opentelemetry.io/collector/receiver + - go.opentelemetry.io/collector/receiver/scraper - go.opentelemetry.io/collector/receiver/nopreceiver - go.opentelemetry.io/collector/receiver/otlpreceiver - go.opentelemetry.io/collector/receiver/receiverprofiles