From 2dc5560b1b4e6939e009e1a7c9c77b3470a11b2d Mon Sep 17 00:00:00 2001 From: sh0rez Date: Tue, 8 Oct 2024 12:10:34 +0200 Subject: [PATCH] [processor/deltatocumulative]: telemetry tests Tests internal telemetry (metadata.TelemetryBuilder) is recorded as expected. Introduces `internal/testing/sdktest` for this. Introduces `-- telemetry --` section to testdata. --- .../internal/data/datatest/equal.go | 2 +- .../internal/lineartelemetry/metrics.go | 9 +- .../datatest => testing}/compare/compare.go | 15 +- .../internal/testing/sdktest/compare.go | 201 ++++++++++++++++++ .../internal/testing/sdktest/example_test.go | 77 +++++++ .../internal/testing/sdktest/metrics.go | 128 +++++++++++ .../internal/{ => testing}/testar/decode.go | 8 +- .../{ => testing}/testar/read_test.go | 4 +- .../processor_test.go | 40 +++- .../testdata/limit/1.test | 8 + .../testdata/limit/2.test | 10 + .../testdata/notemporality-ignored/1.test | 19 ++ .../testdata/timestamps/1.test | 8 + .../testdata/tracking/1.test | 3 + 14 files changed, 511 insertions(+), 21 deletions(-) rename processor/deltatocumulativeprocessor/internal/{data/datatest => testing}/compare/compare.go (68%) create mode 100644 processor/deltatocumulativeprocessor/internal/testing/sdktest/compare.go create mode 100644 processor/deltatocumulativeprocessor/internal/testing/sdktest/example_test.go create mode 100644 processor/deltatocumulativeprocessor/internal/testing/sdktest/metrics.go rename processor/deltatocumulativeprocessor/internal/{ => testing}/testar/decode.go (92%) rename processor/deltatocumulativeprocessor/internal/{ => testing}/testar/read_test.go (89%) diff --git a/processor/deltatocumulativeprocessor/internal/data/datatest/equal.go b/processor/deltatocumulativeprocessor/internal/data/datatest/equal.go index 6e0ed0f7fcc10..c593d6b76f525 100644 --- a/processor/deltatocumulativeprocessor/internal/data/datatest/equal.go +++ b/processor/deltatocumulativeprocessor/internal/data/datatest/equal.go @@ -11,7 +11,7 @@ import ( "github.com/stretchr/testify/require" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest/pmetrictest" - "github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor/internal/data/datatest/compare" + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor/internal/testing/compare" "github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor/internal/data/expo" ) diff --git a/processor/deltatocumulativeprocessor/internal/lineartelemetry/metrics.go b/processor/deltatocumulativeprocessor/internal/lineartelemetry/metrics.go index c81068d75c796..7576883075c5e 100644 --- a/processor/deltatocumulativeprocessor/internal/lineartelemetry/metrics.go +++ b/processor/deltatocumulativeprocessor/internal/lineartelemetry/metrics.go @@ -16,12 +16,13 @@ import ( ) func New(set component.TelemetrySettings) (Metrics, error) { + zero := func() int { return -1 } m := Metrics{ - tracked: func() int { return 0 }, + tracked: &zero, } trackedCb := metadata.WithDeltatocumulativeStreamsTrackedLinearCallback(func() int64 { - return int64(m.tracked()) + return int64((*m.tracked)()) }) telb, err := metadata.NewTelemetryBuilder(set, trackedCb) @@ -36,7 +37,7 @@ func New(set component.TelemetrySettings) (Metrics, error) { type Metrics struct { metadata.TelemetryBuilder - tracked func() int + tracked *func() int } func (m Metrics) Datapoints() Counter { @@ -44,7 +45,7 @@ func (m Metrics) Datapoints() Counter { } func (m *Metrics) 
WithTracked(streams func() int) { - m.tracked = streams + *m.tracked = streams } func Error(msg string) attribute.KeyValue { diff --git a/processor/deltatocumulativeprocessor/internal/data/datatest/compare/compare.go b/processor/deltatocumulativeprocessor/internal/testing/compare/compare.go similarity index 68% rename from processor/deltatocumulativeprocessor/internal/data/datatest/compare/compare.go rename to processor/deltatocumulativeprocessor/internal/testing/compare/compare.go index eb8c0f11174aa..c272df9b3b83c 100644 --- a/processor/deltatocumulativeprocessor/internal/data/datatest/compare/compare.go +++ b/processor/deltatocumulativeprocessor/internal/testing/compare/compare.go @@ -1,7 +1,7 @@ // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 -package compare // import "github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor/internal/data/datatest/compare" +package compare // import "github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor/internal/testing/compare" import ( "reflect" @@ -11,10 +11,21 @@ import ( "github.com/google/go-cmp/cmp/cmpopts" ) +var allow = []string{ + "go.opentelemetry.io/collector/pdata", + "go.opentelemetry.io/otel", + "github.com/open-telemetry/opentelemetry-collector-contrib", +} + var Opts = []cmp.Option{ cmpopts.EquateApprox(0, 1e-9), cmp.Exporter(func(ty reflect.Type) bool { - return strings.HasPrefix(ty.PkgPath(), "go.opentelemetry.io/collector/pdata") || strings.HasPrefix(ty.PkgPath(), "github.com/open-telemetry/opentelemetry-collector-contrib") + for _, prefix := range allow { + if strings.HasPrefix(ty.PkgPath(), prefix) { + return true + } + } + return false }), } diff --git a/processor/deltatocumulativeprocessor/internal/testing/sdktest/compare.go b/processor/deltatocumulativeprocessor/internal/testing/sdktest/compare.go new file mode 100644 index 0000000000000..39d7211016220 --- /dev/null +++ b/processor/deltatocumulativeprocessor/internal/testing/sdktest/compare.go @@ -0,0 +1,201 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +// sdktest performs partial comparison of [sdk.ResourceMetrics] to a [Spec]. +package sdktest // import "github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor/internal/testing/sdktest" + +import ( + stdcmp "cmp" + "context" + "fmt" + "slices" + + "github.com/google/go-cmp/cmp" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/sdk/metric" + sdk "go.opentelemetry.io/otel/sdk/metric/metricdata" + + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor/internal/testing/compare" +) + +// Test the metrics returned by [metric.ManualReader.Collect] against the [Spec] +func Test(spec Spec, mr *metric.ManualReader, opts ...cmp.Option) error { + var rm sdk.ResourceMetrics + if err := mr.Collect(context.Background(), &rm); err != nil { + return err + } + return Compare(spec, rm, opts...) 
+} + +// Compare the [sdk.ResourceMetrics] against the [Spec] +func Compare(spec Spec, rm sdk.ResourceMetrics, opts ...cmp.Option) error { + got := make(map[string]sdk.Metrics) + for _, sm := range rm.ScopeMetrics { + for _, m := range sm.Metrics { + if _, ok := spec[m.Name]; ok { + got[m.Name] = sortData(m) + } + } + } + + want := make(map[string]sdk.Metrics) + for name, spec := range spec { + m := into(spec, got[name]) + want[name] = sortData(m) + } + + cmpfn := func(a, b sdk.Metrics) int { return stdcmp.Compare(a.Name, b.Name) } + mdgot := values(got, cmpfn) + mdwant := values(want, cmpfn) + + opts = append(opts, + cmp.Transformer("sdktest.Transform.int64", Transform[int64]), + cmp.Transformer("sdktest.Transform.float64", Transform[float64]), + // ignore attribute.Set while diffing, as we already compare the map[string]any returned by Transform + cmp.FilterValues(func(_, _ attribute.Set) bool { return true }, cmp.Ignore()), + ) + + if diff := compare.Diff(mdwant, mdgot, opts...); diff != "" { + return fmt.Errorf("\n%s", diff) + } + return nil +} + +func into(spec Metric, base sdk.Metrics) sdk.Metrics { + md := sdk.Metrics{Name: spec.Name, Description: base.Description, Unit: base.Unit} + + intSum := sdk.Sum[int64]{Temporality: spec.Temporality, IsMonotonic: spec.Monotonic} + floatSum := sdk.Sum[float64]{Temporality: spec.Temporality, IsMonotonic: spec.Monotonic} + intGauge := sdk.Gauge[int64]{} + floatGauge := sdk.Gauge[float64]{} + + var idps *[]sdk.DataPoint[int64] + var fdps *[]sdk.DataPoint[float64] + + switch spec.Type { + case TypeSum: + idps = &intSum.DataPoints + fdps = &floatSum.DataPoints + case TypeGauge: + idps = &intGauge.DataPoints + fdps = &floatGauge.DataPoints + default: + panic("todo") + } + + for _, num := range spec.Numbers { + attr := num.Attr.Into() + + switch { + case num.Int != nil: + dp := find[int64](base, attr) + dp.Value = *num.Int + *idps = append(*idps, dp) + case num.Float != nil: + dp := find[float64](base, attr) + dp.Value = *num.Float + *fdps = append(*fdps, dp) + } + } + + switch { + case len(intSum.DataPoints) > 0: + md.Data = intSum + case len(floatSum.DataPoints) > 0: + md.Data = floatSum + case len(intGauge.DataPoints) > 0: + md.Data = intGauge + case len(floatGauge.DataPoints) > 0: + md.Data = floatGauge + } + + return md +} + +func find[N num](base sdk.Metrics, set attribute.Set) sdk.DataPoint[N] { + var dps []sdk.DataPoint[N] + switch ty := base.Data.(type) { + case sdk.Sum[N]: + dps = ty.DataPoints + case sdk.Gauge[N]: + dps = ty.DataPoints + } + + for _, dp := range dps { + if dp.Attributes.Equals(&set) { + return dp + } + } + return sdk.DataPoint[N]{Attributes: set} +} + +type num interface { + int64 | float64 +} + +// DataPoint is like [sdk.DataPoint], but with the attributes as a plain +// map[string]any for better comparison. +type DataPoint[N num] struct { + sdk.DataPoint[N] + Attributes map[string]any +} + +// Transform is used with [cmp.Transformer] to transform [sdk.DataPoint] into [DataPoint] during comparison. +// +// This is done because the [attribute.Set] inside the datapoint does not diff +// properly, as it is too deeply nested and as such truncated by [cmp]. 
+func Transform[N num](dps []sdk.DataPoint[N]) []DataPoint[N] { + out := make([]DataPoint[N], len(dps)) + for i, dp := range dps { + attr := make(map[string]any) + for _, kv := range dp.Attributes.ToSlice() { + attr[string(kv.Key)] = kv.Value.AsInterface() + } + out[i] = DataPoint[N]{DataPoint: dp, Attributes: attr} + } + return out +} + +func keys[K stdcmp.Ordered, V any](m map[K]V) []K { + keys := make([]K, 0, len(m)) + for k := range m { + keys = append(keys, k) + } + slices.SortStableFunc(keys, stdcmp.Compare) + return keys +} + +func values[K comparable, V any](m map[K]V, cmp func(V, V) int) []V { + vals := make([]V, 0, len(m)) + for _, v := range m { + vals = append(vals, v) + } + + slices.SortStableFunc(vals, cmp) + return vals +} + +func compareDp[N num](a, b sdk.DataPoint[N]) int { + return stdcmp.Compare( + a.Attributes.Encoded(attribute.DefaultEncoder()), + b.Attributes.Encoded(attribute.DefaultEncoder()), + ) +} + +func sortData(m sdk.Metrics) sdk.Metrics { + switch ty := m.Data.(type) { + case sdk.Sum[int64]: + slices.SortStableFunc(ty.DataPoints, compareDp[int64]) + m.Data = ty + case sdk.Sum[float64]: + slices.SortStableFunc(ty.DataPoints, compareDp[float64]) + m.Data = ty + case sdk.Gauge[int64]: + slices.SortStableFunc(ty.DataPoints, compareDp[int64]) + m.Data = ty + case sdk.Gauge[float64]: + slices.SortStableFunc(ty.DataPoints, compareDp[float64]) + m.Data = ty + } + return m +} diff --git a/processor/deltatocumulativeprocessor/internal/testing/sdktest/example_test.go b/processor/deltatocumulativeprocessor/internal/testing/sdktest/example_test.go new file mode 100644 index 0000000000000..e43957446cfce --- /dev/null +++ b/processor/deltatocumulativeprocessor/internal/testing/sdktest/example_test.go @@ -0,0 +1,77 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package sdktest + +import ( + "fmt" + + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/metric" + sdk "go.opentelemetry.io/otel/sdk/metric" +) + +// The output of [Test] and [Compare] is similar to the following: +// +// []metricdata.Metrics{ +// - { +// - Name: "not.exist", +// - Data: metricdata.Sum[float64]{ +// - DataPoints: []metricdata.DataPoint[float64]{{...}}, +// - Temporality: s"CumulativeTemporality", +// - }, +// - }, +// { +// Name: "requests.total", +// Description: "I will be inherited", +// Unit: "", +// Data: metricdata.Sum[int64]{ +// DataPoints: []metricdata.DataPoint[int64](Inverse(sdktest.Transform.int64, []sdktest.DataPoint[int64]{ +// {DataPoint: {StartTime: s"2024-10-11 11:23:37.966150738 +0200 CEST m=+0.001489569", Time: s"2024-10-11 11:23:37.966174238 +0200 CEST m=+0.001513070", Value: 20, ...}, Attributes: {}}, +// { +// DataPoint: metricdata.DataPoint[int64]{ +// ... 
// 1 ignored field +// StartTime: s"2024-10-11 11:23:37.966150738 +0200 CEST m=+0.001489569", +// Time: s"2024-10-11 11:23:37.966174238 +0200 CEST m=+0.001513070", +// - Value: 4, +// + Value: 3, +// Exemplars: nil, +// }, +// Attributes: {"error": string("limit")}, +// }, +// })), +// Temporality: s"CumulativeTemporality", +// IsMonotonic: true, +// }, +// }, +// } +// +// Which is used as follows: +func Example() { + var spec Spec + _ = Unmarshal([]byte(` +gauge streams.tracked: + - int: 40 + +counter requests.total: + - int: 20 + - int: 4 + attr: {error: "limit"} + +updown not.exist: + - float: 33.3 +`), &spec) + + mr := sdk.NewManualReader() + meter := sdk.NewMeterProvider(sdk.WithReader(mr)).Meter("test") + + gauge, _ := meter.Int64Gauge("streams.tracked") + gauge.Record(nil, 40) + + count, _ := meter.Int64Counter("requests.total", metric.WithDescription("I will be inherited")) + count.Add(nil, 20) + count.Add(nil, 3, metric.WithAttributes(attribute.String("error", "limit"))) + + err := Test(spec, mr) + fmt.Println(err) +} diff --git a/processor/deltatocumulativeprocessor/internal/testing/sdktest/metrics.go b/processor/deltatocumulativeprocessor/internal/testing/sdktest/metrics.go new file mode 100644 index 0000000000000..e69e240d8048d --- /dev/null +++ b/processor/deltatocumulativeprocessor/internal/testing/sdktest/metrics.go @@ -0,0 +1,128 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package sdktest // import "github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor/internal/testing/sdktest" + +import ( + "fmt" + "strings" + + "go.opentelemetry.io/otel/attribute" + sdk "go.opentelemetry.io/otel/sdk/metric/metricdata" + "gopkg.in/yaml.v3" +) + +// Spec is the partial metric specification. +// +// It only contains those fields required for testing. +// Other fields such as Time or Description are taken from the test output. +type Spec = map[string]Metric + +type Type string + +const ( + TypeSum Type = "sum" + TypeGauge Type = "gauge" +) + +type Metric struct { + Type + Name string + + Numbers []Number + Monotonic bool + Temporality sdk.Temporality +} + +type Number struct { + Int *int64 + Float *float64 + Attr attributes +} + +// Unmarshal specification in [Format] into the given [Spec]. 
+func Unmarshal(data Format, into *Spec) error {
+	var doc map[string]yaml.Node
+	if err := yaml.Unmarshal(data, &doc); err != nil {
+		return err
+	}
+
+	if *into == nil {
+		*into = make(map[string]Metric, len(doc))
+	}
+	md := *into
+
+	for key, node := range doc {
+		args := strings.Fields(key)
+		if len(args) < 2 {
+			return fmt.Errorf("key must be of form '<type> <name>', but got %q", key)
+		}
+
+		m := Metric{Name: args[1]}
+		switch args[0] {
+		case "counter":
+			m.Type = TypeSum
+			m.Monotonic = true
+		case "updown":
+			m.Type = TypeSum
+			m.Monotonic = false
+		case "gauge":
+			m.Type = TypeGauge
+		default:
+			return fmt.Errorf("no such instrument type: %q", args[0])
+		}
+
+		m.Temporality = sdk.CumulativeTemporality
+		for _, arg := range args[2:] {
+			switch arg {
+			case "delta":
+				m.Temporality = sdk.DeltaTemporality
+			case "cumulative":
+				m.Temporality = sdk.CumulativeTemporality
+			}
+		}
+
+		var into any
+		switch m.Type {
+		case TypeGauge, TypeSum:
+			into = &m.Numbers
+		default:
+			panic("unreachable")
+		}
+
+		if err := node.Decode(into); err != nil {
+			return err
+		}
+
+		md[m.Name] = m
+	}
+
+	return nil
+}
+
+type attributes map[string]string
+
+func (attr attributes) Into() attribute.Set {
+	kvs := make([]attribute.KeyValue, 0, len(attr))
+	for k, v := range attr {
+		kvs = append(kvs, attribute.String(k, v))
+	}
+	return attribute.NewSet(kvs...)
+}
+
+// Format defines the yaml-based format to be used with [Unmarshal] for specifying a [Spec].
+//
+// It looks as follows:
+//
+//	<type> <name> [ delta|cumulative ]:
+//	- int: <int64> | float: <float64>
+//	  attr:
+//	    [string]: <string>
+//
+// The supported instruments are:
+//   - counter: [TypeSum], monotonic
+//   - updown: [TypeSum], non-monotonic
+//   - gauge: [TypeGauge]
+//
+// Temporality is optional and defaults to [sdk.CumulativeTemporality].
+type Format = []byte
diff --git a/processor/deltatocumulativeprocessor/internal/testar/decode.go b/processor/deltatocumulativeprocessor/internal/testing/testar/decode.go
similarity index 92%
rename from processor/deltatocumulativeprocessor/internal/testar/decode.go
rename to processor/deltatocumulativeprocessor/internal/testing/testar/decode.go
index 5141df958fbd5..a7e8e90837135 100644
--- a/processor/deltatocumulativeprocessor/internal/testar/decode.go
+++ b/processor/deltatocumulativeprocessor/internal/testing/testar/decode.go
@@ -16,7 +16,7 @@
 //	err := Read(data, &into)
 //
 // See [Read] and [Parser] for examples.
-package testar // import "github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor/internal/testar" +package testar // import "github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor/internal/testing/testar" import ( "fmt" @@ -94,8 +94,10 @@ type Format struct { parse func(file []byte, into any) error } -func Parser(name string, fn func(data []byte, into any) error) Format { - return Format{name: name, parse: fn} +func Parser[T any](name string, fn func([]byte, *T) error) Format { + return Format{name: name, parse: func(file []byte, ptr any) error { + return fn(file, ptr.(*T)) + }} } // LiteralParser sets data unaltered into a []byte or string diff --git a/processor/deltatocumulativeprocessor/internal/testar/read_test.go b/processor/deltatocumulativeprocessor/internal/testing/testar/read_test.go similarity index 89% rename from processor/deltatocumulativeprocessor/internal/testar/read_test.go rename to processor/deltatocumulativeprocessor/internal/testing/testar/read_test.go index 6279ac35527e6..66ddbfa281a51 100644 --- a/processor/deltatocumulativeprocessor/internal/testar/read_test.go +++ b/processor/deltatocumulativeprocessor/internal/testing/testar/read_test.go @@ -42,12 +42,12 @@ func ExampleParser() { Foobar int `testar:"foobar,atoi"` } - _ = Read(data, &into, Parser("atoi", func(file []byte, into any) error { + _ = Read(data, &into, Parser("atoi", func(file []byte, into *int) error { n, err := strconv.Atoi(strings.TrimSpace(string(file))) if err != nil { return err } - *(into.(*int)) = n + *into = n return nil })) diff --git a/processor/deltatocumulativeprocessor/processor_test.go b/processor/deltatocumulativeprocessor/processor_test.go index 12d4452e621fb..b6c382ab6e40c 100644 --- a/processor/deltatocumulativeprocessor/processor_test.go +++ b/processor/deltatocumulativeprocessor/processor_test.go @@ -18,12 +18,12 @@ import ( "go.opentelemetry.io/collector/consumer/consumertest" "go.opentelemetry.io/collector/pdata/pmetric" "go.opentelemetry.io/collector/processor" - "go.opentelemetry.io/collector/processor/processortest" "go.opentelemetry.io/otel/sdk/metric/metricdata" "gopkg.in/yaml.v3" - "github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor/internal/data/datatest/compare" - "github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor/internal/testar" + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor/internal/testing/compare" + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor/internal/testing/sdktest" + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor/internal/testing/testar" ) func TestProcessor(t *testing.T) { @@ -35,14 +35,18 @@ func TestProcessor(t *testing.T) { continue } + type Stage struct { In pmetric.Metrics `testar:"in,pmetric"` Out pmetric.Metrics `testar:"out,pmetric"` + + Sdk sdktest.Spec `testar:"telemetry,sdk"` } read := func(file string, into *Stage) error { return testar.ReadFile(file, into, testar.Parser("pmetric", unmarshalMetrics), + testar.Parser("sdk", sdktest.Unmarshal), ) } @@ -54,7 +58,9 @@ func TestProcessor(t *testing.T) { ctx := context.Background() cfg := config(t, file("config.yaml")) - proc, sink := setup(t, cfg) + + st := setup(t, cfg) + proc, sink := st.proc, st.sink stages, _ := filepath.Glob(file("*.test")) for _, file := range stages { @@ -70,6 +76,10 @@ 
func TestProcessor(t *testing.T) { if diff := compare.Diff(out, sink.AllMetrics()); diff != "" { t.Fatal(diff) } + + if err := sdktest.Test(stage.Sdk, st.tel.reader); err != nil { + t.Fatal(err) + } } }) @@ -89,7 +99,7 @@ func config(t *testing.T, file string) *Config { return cfg } -func setup(t *testing.T, cfg *Config) (processor.Metrics, *consumertest.MetricsSink) { +func setup(t *testing.T, cfg *Config) State { t.Helper() next := &consumertest.MetricsSink{} @@ -97,18 +107,30 @@ func setup(t *testing.T, cfg *Config) (processor.Metrics, *consumertest.MetricsS cfg = &Config{MaxStale: 0, MaxStreams: math.MaxInt} } + tt := setupTestTelemetry() proc, err := NewFactory().CreateMetrics( context.Background(), - processortest.NewNopSettings(), + tt.NewSettings(), cfg, next, ) require.NoError(t, err) - return proc, next + return State{ + proc: proc, + sink: next, + tel: tt, + } +} + +type State struct { + proc processor.Metrics + sink *consumertest.MetricsSink + + tel componentTestTelemetry } -func unmarshalMetrics(data []byte, into any) error { +func unmarshalMetrics(data []byte, into *pmetric.Metrics) error { var tmp any if err := yaml.Unmarshal(data, &tmp); err != nil { return err @@ -121,7 +143,7 @@ func unmarshalMetrics(data []byte, into any) error { if err != nil { return err } - *(into.(*pmetric.Metrics)) = md + *into = md return nil } diff --git a/processor/deltatocumulativeprocessor/testdata/limit/1.test b/processor/deltatocumulativeprocessor/testdata/limit/1.test index 0acad04bf3c64..cdc6d8a97f392 100644 --- a/processor/deltatocumulativeprocessor/testdata/limit/1.test +++ b/processor/deltatocumulativeprocessor/testdata/limit/1.test @@ -45,3 +45,11 @@ resourceMetrics: - {timeUnixNano: 1, asDouble: 1, attributes: [{key: series, value: {stringValue: "7"}}]} - {timeUnixNano: 1, asDouble: 1, attributes: [{key: series, value: {stringValue: "8"}}]} - {timeUnixNano: 1, asDouble: 1, attributes: [{key: series, value: {stringValue: "9"}}]} + +-- telemetry -- +counter otelcol_deltatocumulative.datapoints.linear: + - int: 10 + attr: {} + +updown otelcol_deltatocumulative.streams.tracked.linear: + - int: 10 diff --git a/processor/deltatocumulativeprocessor/testdata/limit/2.test b/processor/deltatocumulativeprocessor/testdata/limit/2.test index 20cd03a7db41f..236e518aee0a7 100644 --- a/processor/deltatocumulativeprocessor/testdata/limit/2.test +++ b/processor/deltatocumulativeprocessor/testdata/limit/2.test @@ -47,3 +47,13 @@ resourceMetrics: - {timeUnixNano: 2, asDouble: 2, attributes: [{key: series, value: {stringValue: "8"}}]} - {timeUnixNano: 2, asDouble: 2, attributes: [{key: series, value: {stringValue: "9"}}]} # - {timeUnixNano: 2, asDouble: 2, attributes: [{key: series, value: {stringValue: "x"}}]} # dropped + +-- telemetry -- +counter otelcol_deltatocumulative.datapoints.linear: + - int: 20 + attr: {} + - int: 1 + attr: {error: "limit"} + +updown otelcol_deltatocumulative.streams.tracked.linear: + - int: 10 diff --git a/processor/deltatocumulativeprocessor/testdata/notemporality-ignored/1.test b/processor/deltatocumulativeprocessor/testdata/notemporality-ignored/1.test index c7c743bcde309..556a59bae0f5d 100644 --- a/processor/deltatocumulativeprocessor/testdata/notemporality-ignored/1.test +++ b/processor/deltatocumulativeprocessor/testdata/notemporality-ignored/1.test @@ -14,6 +14,12 @@ resourceMetrics: - key: scopeattr value: { stringValue: string } metrics: + - name: test.sum + sum: + aggregationTemporality: 1 + dataPoints: + - timeUnixNano: 1 + asInt: 3 - name: test.gauge gauge: 
dataPoints: @@ -43,6 +49,12 @@ resourceMetrics: - key: scopeattr value: { stringValue: string } metrics: + - name: test.sum + sum: + aggregationTemporality: 2 + dataPoints: + - timeUnixNano: 1 + asInt: 3 - name: test.gauge gauge: dataPoints: @@ -55,3 +67,10 @@ resourceMetrics: quantileValues: - quantile: 0.25 value: 25 + +-- telemetry -- +counter otelcol_deltatocumulative.datapoints.linear: +- int: 1 + +updown otelcol_deltatocumulative.streams.tracked.linear: +- int: 1 diff --git a/processor/deltatocumulativeprocessor/testdata/timestamps/1.test b/processor/deltatocumulativeprocessor/testdata/timestamps/1.test index 4f6d48c54e369..cae1f8af87ab0 100644 --- a/processor/deltatocumulativeprocessor/testdata/timestamps/1.test +++ b/processor/deltatocumulativeprocessor/testdata/timestamps/1.test @@ -34,3 +34,11 @@ resourceMetrics: - {startTimeUnixNano: 1000, timeUnixNano: 1100, asDouble: 0} - {startTimeUnixNano: 1000, timeUnixNano: 1200, asDouble: 0} - {startTimeUnixNano: 1000, timeUnixNano: 1400, asDouble: 0} + +-- telemetry -- +counter otelcol_deltatocumulative.datapoints.linear: +- int: 3 +- attr: {error: "delta.ErrOutOfOrder"} + int: 1 +- attr: {error: "delta.ErrOlderStart"} + int: 1 diff --git a/processor/deltatocumulativeprocessor/testdata/tracking/1.test b/processor/deltatocumulativeprocessor/testdata/tracking/1.test index 76ab437989c2b..152fee2b5934f 100644 --- a/processor/deltatocumulativeprocessor/testdata/tracking/1.test +++ b/processor/deltatocumulativeprocessor/testdata/tracking/1.test @@ -374,3 +374,6 @@ resourceMetrics: - {key: "67ef", value: {stringValue: "4299"}} name: 58a7 version: 1cd0 + +-- telemetry -- +# skip