From bbdd281de58f83170500001089c99513bf257f21 Mon Sep 17 00:00:00 2001
From: Matthieu MOREL
Date: Sat, 14 Dec 2024 15:26:59 +0100
Subject: [PATCH] [chore]: use testify instead of testing.Fatal or
 testing.Error in exporter

---
 .../alertmanager_exporter_test.go             |  8 +--
 .../metricsdata_to_logservice_test.go         |  2 +-
 .../awsemfexporter/grouped_metric_test.go     |  2 +-
 .../awsemfexporter/metric_translator_test.go  | 10 +--
 .../clickhouseexporter/exporter_logs_test.go  |  5 +-
 .../internal/logs/sender_test.go              |  5 +-
 .../metrics/sketches/sketches_test.go         |  8 +--
 .../datadogexporter/traces_exporter_test.go   |  7 +--
 .../logs_exporter_test.go                     |  2 +-
 exporter/logzioexporter/jsonlog_test.go       |  8 +--
 exporter/logzioexporter/logziospan_test.go    | 29 +++------
 exporter/lokiexporter/exporter_test.go        |  2 +-
 exporter/mezmoexporter/exporter_test.go       |  4 +-
 .../internal/arrow/exporter_test.go           |  5 +-
 exporter/prometheusexporter/collector_test.go |  2 +-
 .../prometheusexporter/end_to_end_test.go     | 36 +++--------
 .../prometheusexporter/prometheus_test.go     | 61 +++++++------------
 .../exporter_test.go                          |  8 +--
 .../helper_test.go                            |  6 +-
 exporter/pulsarexporter/marshaler_test.go     |  6 +-
 .../internal/translation/converter_test.go    |  5 +-
 .../internal/translation/translator_test.go   |  4 +-
 .../splunkhecexporter/batchperscope_test.go   |  2 +-
 exporter/splunkhecexporter/client_test.go     | 16 ++---
 24 files changed, 80 insertions(+), 163 deletions(-)

diff --git a/exporter/alertmanagerexporter/alertmanager_exporter_test.go b/exporter/alertmanagerexporter/alertmanager_exporter_test.go
index 7e8161043f12..215fbda1c95b 100644
--- a/exporter/alertmanagerexporter/alertmanager_exporter_test.go
+++ b/exporter/alertmanagerexporter/alertmanager_exporter_test.go
@@ -293,9 +293,7 @@ func newMockServer(t *testing.T) *MockServer {
 		_, errWrite := fmt.Fprint(w, "test")
 		assert.NoError(t, errWrite)
 		_, err := io.ReadAll(r.Body)
-		if err != nil {
-			t.Fatal(err)
-		}
+		assert.NoError(t, err)
 		mock.fooCalledSuccessfully = true
 		_, _ = w.Write([]byte("hello"))
 	})
@@ -325,9 +323,7 @@ func TestAlertManagerPostAlert(t *testing.T) {
 	err = am.postAlert(context.Background(), alerts)
 	assert.NoError(t, err)
 
-	if mock.fooCalledSuccessfully == false {
-		t.Errorf("mock server wasn't called")
-	}
+	assert.True(t, mock.fooCalledSuccessfully, "mock server wasn't called")
 }
 
 func TestClientConfig(t *testing.T) {
diff --git a/exporter/alibabacloudlogserviceexporter/metricsdata_to_logservice_test.go b/exporter/alibabacloudlogserviceexporter/metricsdata_to_logservice_test.go
index 67e211517307..1bec6c28aa8c 100644
--- a/exporter/alibabacloudlogserviceexporter/metricsdata_to_logservice_test.go
+++ b/exporter/alibabacloudlogserviceexporter/metricsdata_to_logservice_test.go
@@ -120,7 +120,7 @@ func TestMetricDataToLogService(t *testing.T) {
 		t.Errorf("Failed load log key value pairs from file, error: %v", err)
 		return
 	}
-	assert.Equal(t, len(wantLogs), len(gotLogs))
+	assert.Len(t, gotLogs, len(wantLogs))
 	for j := 0; j < len(gotLogs); j++ {
 		sort.Sort(logKeyValuePairs(gotLogPairs[j]))
 		sort.Sort(logKeyValuePairs(wantLogs[j]))
diff --git a/exporter/awsemfexporter/grouped_metric_test.go b/exporter/awsemfexporter/grouped_metric_test.go
index 8688cfaaca03..fc50b5ab391a 100644
--- a/exporter/awsemfexporter/grouped_metric_test.go
+++ b/exporter/awsemfexporter/grouped_metric_test.go
@@ -118,7 +118,7 @@ func TestAddToGroupedMetric(t *testing.T) {
 
 			assert.Len(t, groupedMetrics, 1)
 			for _, v := range groupedMetrics {
-				assert.Equal(t, len(tc.expectedMetricInfo), len(v.metrics))
+				assert.Len(t, v.metrics, len(tc.expectedMetricInfo))
 				assert.Equal(t, tc.expectedMetricInfo, v.metrics)
 				assert.Len(t, v.labels, 2)
 				assert.Equal(t, generateTestMetricMetadata(namespace, timestamp, logGroup, logStreamName, instrumentationLibName, tc.expectedMetricType), v.metadata)
diff --git a/exporter/awsemfexporter/metric_translator_test.go b/exporter/awsemfexporter/metric_translator_test.go
index 3a8f9268624c..5434c957048d 100644
--- a/exporter/awsemfexporter/metric_translator_test.go
+++ b/exporter/awsemfexporter/metric_translator_test.go
@@ -177,7 +177,7 @@ func hashMetricSlice(metricSlice []cWMetricInfo) []string {
 // assertDimsEqual asserts whether dimension sets are equal
 // (i.e. has same sets of dimensions), regardless of order.
 func assertDimsEqual(t *testing.T, expected, actual [][]string) {
-	assert.Equal(t, len(expected), len(actual))
+	assert.Len(t, actual, len(expected))
 	expectedDimensions := normalizeDimensionality(expected)
 	actualDimensions := normalizeDimensionality(actual)
 	assert.Equal(t, expectedDimensions, actualDimensions)
@@ -215,7 +215,7 @@ func assertCWMeasurementEqual(t *testing.T, expected, actual cWMeasurement) {
 	assert.Equal(t, expected.Namespace, actual.Namespace)
 
 	// Check metrics
-	assert.Equal(t, len(expected.Metrics), len(actual.Metrics))
+	assert.Len(t, actual.Metrics, len(expected.Metrics))
 	expectedHashSlice := hashMetricSlice(expected.Metrics)
 	actualHashSlice := hashMetricSlice(actual.Metrics)
 	assert.Equal(t, expectedHashSlice, actualHashSlice)
@@ -226,7 +226,7 @@ func assertCWMeasurementEqual(t *testing.T, expected, actual cWMeasurement) {
 
 // assertCWMeasurementSliceEqual asserts whether CW Measurements are equal, regardless of order.
 func assertCWMeasurementSliceEqual(t *testing.T, expected, actual []cWMeasurement) {
-	assert.Equal(t, len(expected), len(actual))
+	assert.Len(t, actual, len(expected))
 	seen := make([]bool, len(expected))
 	for _, actualMeasurement := range actual {
 		hasMatch := false
@@ -246,7 +246,7 @@ func assertCWMeasurementSliceEqual(t *testing.T, expected, actual []cWMeasuremen
 func assertCWMetricsEqual(t *testing.T, expected, actual *cWMetrics) {
 	assert.Equal(t, expected.timestampMs, actual.timestampMs)
 	assert.Equal(t, expected.fields, actual.fields)
-	assert.Equal(t, len(expected.measurements), len(actual.measurements))
+	assert.Len(t, actual.measurements, len(expected.measurements))
 	assertCWMeasurementSliceEqual(t, expected.measurements, actual.measurements)
 }
 
@@ -1459,7 +1459,7 @@ func TestGroupedMetricToCWMeasurementsWithFilters(t *testing.T) {
 			cWMeasurements := groupedMetricToCWMeasurementsWithFilters(groupedMetric, config)
 			assert.NotNil(t, cWMeasurements)
-			assert.Equal(t, len(tc.expectedMeasurements), len(cWMeasurements))
+			assert.Len(t, cWMeasurements, len(tc.expectedMeasurements))
 			assertCWMeasurementSliceEqual(t, tc.expectedMeasurements, cWMeasurements)
 		})
 	}
 }
diff --git a/exporter/clickhouseexporter/exporter_logs_test.go b/exporter/clickhouseexporter/exporter_logs_test.go
index bbd577206132..39e798dfd003 100644
--- a/exporter/clickhouseexporter/exporter_logs_test.go
+++ b/exporter/clickhouseexporter/exporter_logs_test.go
@@ -32,10 +32,7 @@ func TestLogsExporter_New(t *testing.T) {
 	_ = func(want error) validate {
 		return func(t *testing.T, exporter *logsExporter, err error) {
 			require.Nil(t, exporter)
-			require.Error(t, err)
-			if !errors.Is(err, want) {
-				t.Fatalf("Expected error '%v', but got '%v'", want, err)
-			}
+			require.ErrorIs(t, err, want, "Expected error '%v', but got '%v'", want, err)
 		}
 	}
 
diff --git a/exporter/datadogexporter/internal/logs/sender_test.go b/exporter/datadogexporter/internal/logs/sender_test.go
index d1bd79901305..b13fe0c4bd61 100644
--- a/exporter/datadogexporter/internal/logs/sender_test.go
+++ b/exporter/datadogexporter/internal/logs/sender_test.go
@@ -13,6 +13,7 @@ import (
 	"github.com/DataDog/datadog-api-client-go/v2/api/datadog"
 	"github.com/DataDog/datadog-api-client-go/v2/api/datadogV2"
 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 	"go.opentelemetry.io/collector/config/confighttp"
 	"go.opentelemetry.io/collector/config/configtls"
 	"go.uber.org/zap/zaptest"
@@ -192,9 +193,7 @@ func TestSubmitLogs(t *testing.T) {
 			})
 			defer server.Close()
 			s := NewSender(server.URL, logger, confighttp.ClientConfig{Timeout: time.Second * 10, TLSSetting: configtls.ClientConfig{InsecureSkipVerify: true}}, true, "")
-			if err := s.SubmitLogs(context.Background(), tt.payload); err != nil {
-				t.Fatal(err)
-			}
+			require.NoError(t, s.SubmitLogs(context.Background(), tt.payload))
 			assert.Equal(t, calls, tt.numRequests)
 		})
 	}
diff --git a/exporter/datadogexporter/internal/metrics/sketches/sketches_test.go b/exporter/datadogexporter/internal/metrics/sketches/sketches_test.go
index c454a7bb865f..1735636a5004 100644
--- a/exporter/datadogexporter/internal/metrics/sketches/sketches_test.go
+++ b/exporter/datadogexporter/internal/metrics/sketches/sketches_test.go
@@ -74,14 +74,10 @@ func TestSketchSeriesListMarshal(t *testing.T) {
 	}
 
 	b, err := sl.Marshal()
-	if err != nil {
-		t.Fatal(err)
-	}
+	require.NoError(t, err)
 
 	pl := new(gogen.SketchPayload)
-	if err := pl.Unmarshal(b); err != nil {
-		t.Fatal(err)
-	}
+	require.NoError(t, pl.Unmarshal(b))
 
 	require.Len(t, pl.Sketches, len(sl))
 
diff --git a/exporter/datadogexporter/traces_exporter_test.go b/exporter/datadogexporter/traces_exporter_test.go
index 54e4aaf73e4e..169276dd8c9d 100644
--- a/exporter/datadogexporter/traces_exporter_test.go
+++ b/exporter/datadogexporter/traces_exporter_test.go
@@ -116,11 +116,10 @@ func TestTracesSource(t *testing.T) {
 			return
 		}
 		buf := new(bytes.Buffer)
-		if _, err := buf.ReadFrom(r.Body); err != nil {
-			t.Fatalf("Metrics server handler error: %v", err)
-		}
+		_, err := buf.ReadFrom(r.Body)
+		assert.NoError(t, err, "Metrics server handler error: %v", err)
 		reqs <- buf.Bytes()
-		_, err := w.Write([]byte("{\"status\": \"ok\"}"))
+		_, err = w.Write([]byte("{\"status\": \"ok\"}"))
 		assert.NoError(t, err)
 	}))
 	defer metricsServer.Close()
diff --git a/exporter/honeycombmarkerexporter/logs_exporter_test.go b/exporter/honeycombmarkerexporter/logs_exporter_test.go
index 403e43863a1b..bf79231ea80b 100644
--- a/exporter/honeycombmarkerexporter/logs_exporter_test.go
+++ b/exporter/honeycombmarkerexporter/logs_exporter_test.go
@@ -125,7 +125,7 @@ func TestExportMarkers(t *testing.T) {
 
 			assert.NoError(t, err)
 
-			assert.Equal(t, len(tt.attributeMap), len(decodedBody))
+			assert.Len(t, decodedBody, len(tt.attributeMap))
 
 			for attr := range tt.attributeMap {
 				assert.Equal(t, tt.attributeMap[attr], decodedBody[attr])
diff --git a/exporter/logzioexporter/jsonlog_test.go b/exporter/logzioexporter/jsonlog_test.go
index 9a495510e420..f7225b3db4f8 100644
--- a/exporter/logzioexporter/jsonlog_test.go
+++ b/exporter/logzioexporter/jsonlog_test.go
@@ -111,10 +111,6 @@ func TestSetTimeStamp(t *testing.T) {
 	requests := strings.Split(string(decoded), "\n")
 	require.NoError(t, json.Unmarshal([]byte(requests[0]), &jsonLog))
 	require.NoError(t, json.Unmarshal([]byte(requests[1]), &jsonLogNoTimestamp))
-	if jsonLogNoTimestamp["@timestamp"] != nil {
-		t.Fatalf("did not expect @timestamp")
-	}
-	if jsonLog["@timestamp"] == nil {
-		t.Fatalf("@timestamp does not exist")
-	}
+	require.Nil(t, jsonLogNoTimestamp["@timestamp"], "did not expect @timestamp")
+	require.NotNil(t, jsonLog["@timestamp"], "@timestamp does not exist")
 }
diff --git a/exporter/logzioexporter/logziospan_test.go b/exporter/logzioexporter/logziospan_test.go
index 00a70aaafa00..faea671f4b40 100644
--- a/exporter/logzioexporter/logziospan_test.go
+++ b/exporter/logzioexporter/logziospan_test.go
@@ -10,13 +10,12 @@ import (
 	"testing"
 
 	"github.com/jaegertracing/jaeger/model"
+	"github.com/stretchr/testify/require"
 )
 
 func TestTransformToLogzioSpanBytes(tester *testing.T) {
 	inStr, err := os.ReadFile("./testdata/span.json")
-	if err != nil {
-		tester.Fatalf("error opening sample span file %s", err.Error())
-	}
+	require.NoError(tester, err, "error opening sample span file")
 	var span model.Span
 	err = json.Unmarshal(inStr, &span)
@@ -24,14 +23,10 @@ func TestTransformToLogzioSpanBytes(tester *testing.T) {
 		fmt.Println("json.Unmarshal")
 	}
 	newSpan, err := transformToLogzioSpanBytes(&span)
-	if err != nil {
-		tester.Fatal(err.Error())
-	}
+	require.NoError(tester, err)
 	m := make(map[string]any)
 	err = json.Unmarshal(newSpan, &m)
-	if err != nil {
-		tester.Fatal(err.Error())
-	}
+	require.NoError(tester, err)
 	if _, ok := m["JaegerTag"]; !ok {
 		tester.Error("error converting span to logzioSpan, JaegerTag is not found")
 	}
@@ -39,25 +34,17 @@ func TestTransformToDbModelSpan(tester *testing.T) {
 	inStr, err := os.ReadFile("./testdata/span.json")
-	if err != nil {
-		tester.Fatalf("error opening sample span file %s", err.Error())
-	}
+	require.NoError(tester, err, "error opening sample span file")
 	var span model.Span
 	err = json.Unmarshal(inStr, &span)
 	if err != nil {
 		fmt.Println("json.Unmarshal")
 	}
 	newSpan, err := transformToLogzioSpanBytes(&span)
-	if err != nil {
-		tester.Fatal(err.Error())
-	}
+	require.NoError(tester, err)
 	var testLogzioSpan logzioSpan
 	err = json.Unmarshal(newSpan, &testLogzioSpan)
-	if err != nil {
-		tester.Fatal(err.Error())
-	}
+	require.NoError(tester, err)
 	dbModelSpan := testLogzioSpan.transformToDbModelSpan()
-	if len(dbModelSpan.References) != 3 {
-		tester.Fatalf("Error converting logzio span to dbmodel span")
-	}
+	require.Len(tester, dbModelSpan.References, 3, "Error converting logzio span to dbmodel span")
 }
diff --git a/exporter/lokiexporter/exporter_test.go b/exporter/lokiexporter/exporter_test.go
index 37f29f29d3da..65179e609120 100644
--- a/exporter/lokiexporter/exporter_test.go
+++ b/exporter/lokiexporter/exporter_test.go
@@ -274,7 +274,7 @@ func TestLogsToLokiRequestWithGroupingByTenant(t *testing.T) {
 
 			// actualPushRequest is populated within the test http server, we check it here as assertions are better done at the
 			// end of the test function
-			assert.Equal(t, len(actualPushRequestPerTenant), len(tC.expected))
+			assert.Len(t, tC.expected, len(actualPushRequestPerTenant))
 			for tenant, request := range actualPushRequestPerTenant {
 				pr, ok := tC.expected[tenant]
 				assert.True(t, ok)
diff --git a/exporter/mezmoexporter/exporter_test.go b/exporter/mezmoexporter/exporter_test.go
index 01588285c6dd..1e601f221050 100644
--- a/exporter/mezmoexporter/exporter_test.go
+++ b/exporter/mezmoexporter/exporter_test.go
@@ -122,9 +122,7 @@ type (
 func createHTTPServer(params *testServerParams) testServer {
 	httpServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 		body, err := io.ReadAll(r.Body)
-		if err != nil {
-			params.t.Fatal(err)
-		}
+		assert.NoError(params.t, err)
 
 		var logBody mezmoLogBody
 		if err = json.Unmarshal(body, &logBody); err != nil {
diff --git a/exporter/otelarrowexporter/internal/arrow/exporter_test.go b/exporter/otelarrowexporter/internal/arrow/exporter_test.go
index e03e1b145aef..81d5c131adf7 100644
--- a/exporter/otelarrowexporter/internal/arrow/exporter_test.go
+++ b/exporter/otelarrowexporter/internal/arrow/exporter_test.go
@@ -922,9 +922,8 @@ func benchmarkPrioritizer(b *testing.B, numStreams int, pname PrioritizerName) {
 	wg.Add(1)
 
 	defer func() {
-		if err := tc.exporter.Shutdown(bg); err != nil {
-			b.Errorf("shutdown failed: %v", err)
-		}
+		err := tc.exporter.Shutdown(bg)
+		assert.NoError(b, err, "shutdown failed")
 		wg.Done()
 		wg.Wait()
 	}()
diff --git a/exporter/prometheusexporter/collector_test.go b/exporter/prometheusexporter/collector_test.go
index 0552f2ec7135..d37a424c4712 100644
--- a/exporter/prometheusexporter/collector_test.go
+++ b/exporter/prometheusexporter/collector_test.go
@@ -689,7 +689,7 @@ func TestAccumulateHistograms(t *testing.T) {
 			h := pbMetric.Histogram
 			require.Equal(t, tt.histogramCount, h.GetSampleCount())
 			require.Equal(t, tt.histogramSum, h.GetSampleSum())
-			require.Equal(t, len(tt.histogramPoints), len(h.Bucket))
+			require.Len(t, h.Bucket, len(tt.histogramPoints))
 
 			for _, b := range h.Bucket {
 				require.Equal(t, tt.histogramPoints[(*b).GetUpperBound()], b.GetCumulativeCount())
diff --git a/exporter/prometheusexporter/end_to_end_test.go b/exporter/prometheusexporter/end_to_end_test.go
index f3d1b70899a4..75b418cef646 100644
--- a/exporter/prometheusexporter/end_to_end_test.go
+++ b/exporter/prometheusexporter/end_to_end_test.go
@@ -48,9 +48,7 @@ func TestEndToEndSummarySupport(t *testing.T) {
 	defer dropWizardServer.Close()
 
 	srvURL, err := url.Parse(dropWizardServer.URL)
-	if err != nil {
-		t.Fatal(err)
-	}
+	require.NoError(t, err)
 
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
@@ -67,12 +65,8 @@ func TestEndToEndSummarySupport(t *testing.T) {
 	exporterFactory := NewFactory()
 	set := exportertest.NewNopSettings()
 	exporter, err := exporterFactory.CreateMetrics(ctx, set, exporterCfg)
-	if err != nil {
-		t.Fatal(err)
-	}
-	if err = exporter.Start(ctx, nil); err != nil {
-		t.Fatalf("Failed to start the Prometheus exporter: %v", err)
-	}
+	require.NoError(t, err)
+	require.NoError(t, exporter.Start(ctx, nil), "Failed to start the Prometheus exporter: %v", err)
 	t.Cleanup(func() { require.NoError(t, exporter.Shutdown(ctx)) })
 
 	// 3. Create the Prometheus receiver scraping from the DropWizard mock server and
@@ -89,9 +83,7 @@ func TestEndToEndSummarySupport(t *testing.T) {
       - targets: ['%s']
 `, srvURL.Host))
 	receiverConfig := new(prometheusreceiver.PromConfig)
-	if err = yaml.Unmarshal(yamlConfig, receiverConfig); err != nil {
-		t.Fatal(err)
-	}
+	require.NoError(t, yaml.Unmarshal(yamlConfig, receiverConfig))
 	receiverFactory := prometheusreceiver.NewFactory()
 	receiverCreateSet := receivertest.NewNopSettings()
 	rcvCfg := &prometheusreceiver.Config{
@@ -100,26 +92,18 @@ func TestEndToEndSummarySupport(t *testing.T) {
 	}
 	// 3.5 Create the Prometheus receiver and pass in the previously created Prometheus exporter.
 	prometheusReceiver, err := receiverFactory.CreateMetrics(ctx, receiverCreateSet, rcvCfg, exporter)
-	if err != nil {
-		t.Fatal(err)
-	}
-	if err = prometheusReceiver.Start(ctx, nil); err != nil {
-		t.Fatalf("Failed to start the Prometheus receiver: %v", err)
-	}
+	require.NoError(t, err)
+	require.NoError(t, prometheusReceiver.Start(ctx, nil), "Failed to start the Prometheus receiver: %v", err)
 	t.Cleanup(func() { require.NoError(t, prometheusReceiver.Shutdown(ctx)) })
 
 	// 4. Scrape from the Prometheus receiver to ensure that we export summary metrics
 	wg.Wait()
 
 	res, err := http.Get("http://" + exporterCfg.Endpoint + "/metrics")
-	if err != nil {
-		t.Fatalf("Failed to scrape from the exporter: %v", err)
-	}
+	require.NoError(t, err, "Failed to scrape from the exporter: %v", err)
 	prometheusExporterScrape, err := io.ReadAll(res.Body)
 	res.Body.Close()
-	if err != nil {
-		t.Fatal(err)
-	}
+	require.NoError(t, err)
 
 	// 5. Verify that we have the summary metrics and that their values make sense.
 	wantLineRegexps := []string{
@@ -171,9 +155,7 @@ func TestEndToEndSummarySupport(t *testing.T) {
 	// After this replacement, there should ONLY be newlines present.
 	prometheusExporterScrape = bytes.ReplaceAll(prometheusExporterScrape, []byte("\n"), []byte(""))
 	// Now assert that NO output was left over.
-	if len(prometheusExporterScrape) != 0 {
-		t.Fatalf("Left-over unmatched Prometheus scrape content: %q\n", prometheusExporterScrape)
-	}
+	require.Empty(t, prometheusExporterScrape, "Left-over unmatched Prometheus scrape content: %q\n", prometheusExporterScrape)
 }
 
 // the following triggers G101: Potential hardcoded credentials
diff --git a/exporter/prometheusexporter/prometheus_test.go b/exporter/prometheusexporter/prometheus_test.go
index 1103ab8114bf..3c4810342df3 100644
--- a/exporter/prometheusexporter/prometheus_test.go
+++ b/exporter/prometheusexporter/prometheus_test.go
@@ -8,7 +8,6 @@ import (
 	"fmt"
 	"io"
 	"net/http"
-	"strings"
 	"testing"
 	"time"
 
@@ -149,9 +148,8 @@ func TestPrometheusExporter_WithTLS(t *testing.T) {
 	rsp, err := httpClient.Get("https://localhost:7777/metrics")
 	require.NoError(t, err, "Failed to perform a scrape")
 
-	if g, w := rsp.StatusCode, 200; g != w {
-		t.Errorf("Mismatched HTTP response status code: Got: %d Want: %d", g, w)
-	}
+	g, w := rsp.StatusCode, 200
+	assert.Equal(t, g, w, "Mismatched HTTP response status code: Got: %d Want: %d", g, w)
 	blob, _ := io.ReadAll(rsp.Body)
 	_ = rsp.Body.Close()
 
@@ -164,9 +162,7 @@ func TestPrometheusExporter_WithTLS(t *testing.T) {
 	}
 
 	for _, w := range want {
-		if !strings.Contains(string(blob), w) {
-			t.Errorf("Missing %v from response:\n%v", w, string(blob))
-		}
+		assert.Contains(t, string(blob), w, "Missing %v from response:\n%v", w, string(blob))
 	}
 }
 
@@ -208,9 +204,8 @@ func TestPrometheusExporter_endToEndMultipleTargets(t *testing.T) {
 		res, err1 := http.Get("http://localhost:7777/metrics")
 		require.NoError(t, err1, "Failed to perform a scrape")
 
-		if g, w := res.StatusCode, 200; g != w {
-			t.Errorf("Mismatched HTTP response status code: Got: %d Want: %d", g, w)
-		}
+		g, w := res.StatusCode, 200
+		assert.Equal(t, g, w, "Mismatched HTTP response status code: Got: %d Want: %d", g, w)
 		blob, _ := io.ReadAll(res.Body)
 		_ = res.Body.Close()
 		want := []string{
@@ -229,9 +224,7 @@ func TestPrometheusExporter_endToEndMultipleTargets(t *testing.T) {
 		}
 
 		for _, w := range want {
-			if !strings.Contains(string(blob), w) {
-				t.Errorf("Missing %v from response:\n%v", w, string(blob))
-			}
+			assert.Contains(t, string(blob), w, "Missing %v from response:\n%v", w, string(blob))
 		}
 	}
 
@@ -242,9 +235,8 @@ func TestPrometheusExporter_endToEndMultipleTargets(t *testing.T) {
 		res, err := http.Get("http://localhost:7777/metrics")
 		require.NoError(t, err, "Failed to perform a scrape")
 
-		if g, w := res.StatusCode, 200; g != w {
-			t.Errorf("Mismatched HTTP response status code: Got: %d Want: %d", g, w)
-		}
+		g, w := res.StatusCode, 200
+		assert.Equal(t, g, w, "Mismatched HTTP response status code: Got: %d Want: %d", g, w)
 		blob, _ := io.ReadAll(res.Body)
 		_ = res.Body.Close()
 		require.Emptyf(t, string(blob), "Metrics did not expire")
@@ -285,9 +277,8 @@ func TestPrometheusExporter_endToEnd(t *testing.T) {
 		res, err1 := http.Get("http://localhost:7777/metrics")
 		require.NoError(t, err1, "Failed to perform a scrape")
 
-		if g, w := res.StatusCode, 200; g != w {
-			t.Errorf("Mismatched HTTP response status code: Got: %d Want: %d", g, w)
-		}
+		g, w := res.StatusCode, 200
+		assert.Equal(t, g, w, "Mismatched HTTP response status code: Got: %d Want: %d", g, w)
 		blob, _ := io.ReadAll(res.Body)
 		_ = res.Body.Close()
 		want := []string{
@@ -302,9 +293,7 @@ func TestPrometheusExporter_endToEnd(t *testing.T) {
 		}
 
 		for _, w := range want {
-			if !strings.Contains(string(blob), w) {
-				t.Errorf("Missing %v from response:\n%v", w, string(blob))
-			}
+			assert.Contains(t, string(blob), w, "Missing %v from response:\n%v", w, string(blob))
 		}
 	}
 
@@ -315,9 +304,8 @@ func TestPrometheusExporter_endToEnd(t *testing.T) {
 		res, err := http.Get("http://localhost:7777/metrics")
 		require.NoError(t, err, "Failed to perform a scrape")
 
-		if g, w := res.StatusCode, 200; g != w {
-			t.Errorf("Mismatched HTTP response status code: Got: %d Want: %d", g, w)
-		}
+		g, w := res.StatusCode, 200
+		assert.Equal(t, g, w, "Mismatched HTTP response status code: Got: %d Want: %d", g, w)
 		blob, _ := io.ReadAll(res.Body)
 		_ = res.Body.Close()
 		require.Emptyf(t, string(blob), "Metrics did not expire")
@@ -359,9 +347,8 @@ func TestPrometheusExporter_endToEndWithTimestamps(t *testing.T) {
 		res, err1 := http.Get("http://localhost:7777/metrics")
 		require.NoError(t, err1, "Failed to perform a scrape")
 
-		if g, w := res.StatusCode, 200; g != w {
-			t.Errorf("Mismatched HTTP response status code: Got: %d Want: %d", g, w)
-		}
+		g, w := res.StatusCode, 200
+		assert.Equal(t, g, w, "Mismatched HTTP response status code: Got: %d Want: %d", g, w)
 		blob, _ := io.ReadAll(res.Body)
 		_ = res.Body.Close()
 		want := []string{
@@ -376,9 +363,7 @@ func TestPrometheusExporter_endToEndWithTimestamps(t *testing.T) {
 		}
 
 		for _, w := range want {
-			if !strings.Contains(string(blob), w) {
-				t.Errorf("Missing %v from response:\n%v", w, string(blob))
-			}
+			assert.Contains(t, string(blob), w, "Missing %v from response:\n%v", w, string(blob))
 		}
 	}
 
@@ -389,9 +374,8 @@ func TestPrometheusExporter_endToEndWithTimestamps(t *testing.T) {
 		res, err := http.Get("http://localhost:7777/metrics")
 		require.NoError(t, err, "Failed to perform a scrape")
 
-		if g, w := res.StatusCode, 200; g != w {
-			t.Errorf("Mismatched HTTP response status code: Got: %d Want: %d", g, w)
-		}
+		g, w := res.StatusCode, 200
+		assert.Equal(t, g, w, "Mismatched HTTP response status code: Got: %d Want: %d", g, w)
 		blob, _ := io.ReadAll(res.Body)
 		_ = res.Body.Close()
 		require.Emptyf(t, string(blob), "Metrics did not expire")
@@ -434,9 +418,8 @@ func TestPrometheusExporter_endToEndWithResource(t *testing.T) {
 	rsp, err := http.Get("http://localhost:7777/metrics")
 	require.NoError(t, err, "Failed to perform a scrape")
 
-	if g, w := rsp.StatusCode, 200; g != w {
-		t.Errorf("Mismatched HTTP response status code: Got: %d Want: %d", g, w)
-	}
+	g, w := rsp.StatusCode, 200
+	assert.Equal(t, g, w, "Mismatched HTTP response status code: Got: %d Want: %d", g, w)
 	blob, _ := io.ReadAll(rsp.Body)
 	_ = rsp.Body.Close()
 
@@ -449,9 +432,7 @@ func TestPrometheusExporter_endToEndWithResource(t *testing.T) {
 	}
 
 	for _, w := range want {
-		if !strings.Contains(string(blob), w) {
-			t.Errorf("Missing %v from response:\n%v", w, string(blob))
-		}
+		assert.Contains(t, string(blob), w, "Missing %v from response:\n%v", w, string(blob))
 	}
 }
diff --git a/exporter/prometheusremotewriteexporter/exporter_test.go b/exporter/prometheusremotewriteexporter/exporter_test.go
index 5fbe3ef237ab..864e85323bdb 100644
--- a/exporter/prometheusremotewriteexporter/exporter_test.go
+++ b/exporter/prometheusremotewriteexporter/exporter_test.go
@@ -260,9 +260,7 @@ func Test_export(t *testing.T) {
 		// The following is a handler function that reads the sent httpRequest, unmarshal, and checks if the WriteRequest
 		// preserves the TimeSeries data correctly
 		body, err := io.ReadAll(r.Body)
-		if err != nil {
-			t.Fatal(err)
-		}
+		require.NoError(t, err)
 		require.NotNil(t, body)
 		// Receives the http requests and unzip, unmarshalls, and extracts TimeSeries
 		assert.Equal(t, "0.1.0", r.Header.Get("X-Prometheus-Remote-Write-Version"))
@@ -452,9 +450,7 @@ func Test_PushMetrics(t *testing.T) {
 
 	checkFunc := func(t *testing.T, r *http.Request, expected int, isStaleMarker bool) {
 		body, err := io.ReadAll(r.Body)
-		if err != nil {
-			t.Fatal(err)
-		}
+		require.NoError(t, err)
 
 		buf := make([]byte, len(body))
 		dest, err := snappy.Decode(buf, body)
diff --git a/exporter/prometheusremotewriteexporter/helper_test.go b/exporter/prometheusremotewriteexporter/helper_test.go
index f464c25071b0..e7ce715ac5be 100644
--- a/exporter/prometheusremotewriteexporter/helper_test.go
+++ b/exporter/prometheusremotewriteexporter/helper_test.go
@@ -242,10 +242,8 @@ func TestEnsureTimeseriesPointsAreSortedByTimestamp(t *testing.T) {
 			si := ts.Samples[i]
 			for j := 0; j < i; j++ {
 				sj := ts.Samples[j]
-				if sj.Timestamp > si.Timestamp {
-					t.Errorf("Timeseries[%d]: Sample[%d].Timestamp(%d) > Sample[%d].Timestamp(%d)",
-						ti, j, sj.Timestamp, i, si.Timestamp)
-				}
+				assert.LessOrEqual(t, sj.Timestamp, si.Timestamp, "Timeseries[%d]: Sample[%d].Timestamp(%d) > Sample[%d].Timestamp(%d)",
+					ti, j, sj.Timestamp, i, si.Timestamp)
 			}
 		}
 	}
diff --git a/exporter/pulsarexporter/marshaler_test.go b/exporter/pulsarexporter/marshaler_test.go
index d84f69c89e02..82f10b97bf10 100644
--- a/exporter/pulsarexporter/marshaler_test.go
+++ b/exporter/pulsarexporter/marshaler_test.go
@@ -24,7 +24,7 @@ func TestDefaultTracesMarshalers(t *testing.T) {
 		"jaeger_json",
 	}
 	marshalers := tracesMarshalers()
-	assert.Equal(t, len(expectedEncodings), len(marshalers))
+	assert.Len(t, marshalers, len(expectedEncodings))
 	for _, e := range expectedEncodings {
 		t.Run(e, func(t *testing.T) {
 			m, ok := marshalers[e]
@@ -40,7 +40,7 @@ func TestDefaultMetricsMarshalers(t *testing.T) {
 		"otlp_json",
 	}
 	marshalers := metricsMarshalers()
-	assert.Equal(t, len(expectedEncodings), len(marshalers))
+	assert.Len(t, marshalers, len(expectedEncodings))
 	for _, e := range expectedEncodings {
 		t.Run(e, func(t *testing.T) {
 			m, ok := marshalers[e]
@@ -56,7 +56,7 @@ func TestDefaultLogsMarshalers(t *testing.T) {
 		"otlp_json",
 	}
 	marshalers := logsMarshalers()
-	assert.Equal(t, len(expectedEncodings), len(marshalers))
+	assert.Len(t, marshalers, len(expectedEncodings))
 	for _, e := range expectedEncodings {
 		t.Run(e, func(t *testing.T) {
 			m, ok := marshalers[e]
diff --git a/exporter/signalfxexporter/internal/translation/converter_test.go b/exporter/signalfxexporter/internal/translation/converter_test.go
index 55f91da8bd18..ce1c7edaa757 100644
--- a/exporter/signalfxexporter/internal/translation/converter_test.go
+++ b/exporter/signalfxexporter/internal/translation/converter_test.go
@@ -1358,9 +1358,8 @@ func TestMetricsConverter_ConvertDimension(t *testing.T) {
 		t.Run(tt.name, func(t *testing.T) {
 			c, err := NewMetricsConverter(zap.NewNop(), tt.fields.metricTranslator, nil, nil, tt.fields.nonAlphanumericDimChars, false, true)
 			require.NoError(t, err)
-			if got := c.ConvertDimension(tt.args.dim); got != tt.want {
-				t.Errorf("ConvertDimension() = %v, want %v", got, tt.want)
-			}
+			got := c.ConvertDimension(tt.args.dim)
+			assert.Equal(t, tt.want, got, "ConvertDimension() = %v, want %v", got, tt.want)
 		})
 	}
 }
diff --git a/exporter/signalfxexporter/internal/translation/translator_test.go b/exporter/signalfxexporter/internal/translation/translator_test.go
index 23fa4194074f..54d4b876a70b 100644
--- a/exporter/signalfxexporter/internal/translation/translator_test.go
+++ b/exporter/signalfxexporter/internal/translation/translator_test.go
@@ -2564,7 +2564,7 @@ func requireDeltaMetricOk(t *testing.T, md1, md2, md3 pmetric.Metrics) (
 
 	deltaPts1, ok := m2["system.cpu.delta"]
 	require.True(t, ok)
-	require.Equal(t, len(origPts), len(deltaPts1))
+	require.Len(t, deltaPts1, len(origPts))
 	counterType := sfxpb.MetricType_GAUGE
 	for _, pt := range deltaPts1 {
 		require.Equal(t, &counterType, pt.MetricType)
@@ -2576,7 +2576,7 @@ func requireDeltaMetricOk(t *testing.T, md1, md2, md3 pmetric.Metrics) (
 
 	deltaPts2, ok := m3["system.cpu.delta"]
 	require.True(t, ok)
-	require.Equal(t, len(origPts), len(deltaPts2))
+	require.Len(t, deltaPts2, len(origPts))
 	for _, pt := range deltaPts2 {
 		require.Equal(t, &counterType, pt.MetricType)
 	}
diff --git a/exporter/splunkhecexporter/batchperscope_test.go b/exporter/splunkhecexporter/batchperscope_test.go
index 3608bb4ee5c3..4a3924b95555 100644
--- a/exporter/splunkhecexporter/batchperscope_test.go
+++ b/exporter/splunkhecexporter/batchperscope_test.go
@@ -115,7 +115,7 @@ func TestBatchLogs_ConsumeLogs(t *testing.T) {
 			err = consumer.ConsumeLogs(context.Background(), logs)
 			assert.NoError(t, err)
 
-			require.Equal(t, len(tt.out), len(sink.AllLogs()))
+			require.Len(t, sink.AllLogs(), len(tt.out))
 			for i, out := range tt.out {
 				expected, err := golden.ReadLogs("testdata/batchperscope/" + out)
 				require.NoError(t, err)
diff --git a/exporter/splunkhecexporter/client_test.go b/exporter/splunkhecexporter/client_test.go
index 0f4ccd1e349f..28ce3ca540d2 100644
--- a/exporter/splunkhecexporter/client_test.go
+++ b/exporter/splunkhecexporter/client_test.go
@@ -194,8 +194,8 @@ type capturingData struct {
 
 func (c *capturingData) ServeHTTP(w http.ResponseWriter, r *http.Request) {
 	body, err := io.ReadAll(r.Body)
-	if c.checkCompression && r.Header.Get("Content-Encoding") != "gzip" {
-		c.testing.Fatal("No compression")
+	if c.checkCompression {
+		assert.Equal(c.testing, "gzip", r.Header.Get("Content-Encoding"), "No compression")
 	}
 
 	if err != nil {
@@ -535,9 +535,7 @@ func TestReceiveTracesBatches(t *testing.T) {
 				}
 				timeStr := fmt.Sprintf(`"time":%d,`, i+1)
 				if strings.Contains(string(batchBody), timeStr) {
-					if eventFound {
-						t.Errorf("span event %d found in multiple batches", i)
-					}
+					assert.False(t, eventFound, "span event %d found in multiple batches", i)
 					eventFound = true
 				}
 			}
@@ -827,9 +825,7 @@ func TestReceiveLogs(t *testing.T) {
 					require.NoError(t, err)
 				}
 				if strings.Contains(string(batchBody), fmt.Sprintf(`"%s"`, attrVal.Str())) {
-					if eventFound {
-						t.Errorf("log event %s found in multiple batches", attrVal.Str())
-					}
+					assert.False(t, eventFound, "log event %s found in multiple batches", attrVal.Str())
 					eventFound = true
 					droppedCount--
 				}
@@ -1203,9 +1199,7 @@ func TestReceiveBatchedMetrics(t *testing.T) {
 				}
 				time := float64(i) + 0.001*float64(i)
 				if strings.Contains(string(batchBody), fmt.Sprintf(`"time":%g`, time)) {
-					if eventFound {
-						t.Errorf("metric event %d found in multiple batches", i)
-					}
+					assert.False(t, eventFound, "metric event %d found in multiple batches", i)
 					eventFound = true
 				}
 			}
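
For reference, below is a minimal, self-contained sketch of the conversion pattern this patch applies throughout: t.Fatal/t.Error guards collapsed into single testify assertions. It is not part of the changed files; the lookup helper and errNotFound sentinel are hypothetical, introduced only for illustration.

// Illustrative only -- not part of the patch.
package example

import (
	"errors"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

var errNotFound = errors.New("not found")

// lookup is a stand-in for whatever function a test exercises.
func lookup(key string) ([]string, error) {
	if key == "" {
		return nil, errNotFound
	}
	return []string{"a", "b"}, nil
}

func TestLookup(t *testing.T) {
	got, err := lookup("ok")
	// Before: if err != nil { t.Fatal(err) }
	require.NoError(t, err)
	// Before: if len(got) != 2 { t.Errorf("unexpected length %d", len(got)) }
	assert.Len(t, got, 2)

	_, err = lookup("")
	// Before: if !errors.Is(err, errNotFound) { t.Fatalf("expected %v, got %v", errNotFound, err) }
	require.ErrorIs(t, err, errNotFound)
}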