Skip to content

Commit

Permalink
[chore]: use testify instead of testing.Fatal or testing.Error in exporter
Browse files Browse the repository at this point in the history
  • Loading branch information
mmorel-35 committed Dec 14, 2024
1 parent 4c33430 commit bbdd281
Show file tree
Hide file tree
Showing 24 changed files with 80 additions and 163 deletions.
8 changes: 2 additions & 6 deletions exporter/alertmanagerexporter/alertmanager_exporter_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -293,9 +293,7 @@ func newMockServer(t *testing.T) *MockServer {
_, errWrite := fmt.Fprint(w, "test")
assert.NoError(t, errWrite)
_, err := io.ReadAll(r.Body)
if err != nil {
t.Fatal(err)
}
assert.NoError(t, err)
mock.fooCalledSuccessfully = true
_, _ = w.Write([]byte("hello"))
})
Expand Down Expand Up @@ -325,9 +323,7 @@ func TestAlertManagerPostAlert(t *testing.T) {

err = am.postAlert(context.Background(), alerts)
assert.NoError(t, err)
if mock.fooCalledSuccessfully == false {
t.Errorf("mock server wasn't called")
}
assert.True(t, mock.fooCalledSuccessfully, "mock server wasn't called")
}

func TestClientConfig(t *testing.T) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,7 @@ func TestMetricDataToLogService(t *testing.T) {
t.Errorf("Failed load log key value pairs from file, error: %v", err)
return
}
assert.Equal(t, len(wantLogs), len(gotLogs))
assert.Len(t, gotLogs, len(wantLogs))
for j := 0; j < len(gotLogs); j++ {
sort.Sort(logKeyValuePairs(gotLogPairs[j]))
sort.Sort(logKeyValuePairs(wantLogs[j]))
Expand Down
2 changes: 1 addition & 1 deletion exporter/awsemfexporter/grouped_metric_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,7 @@ func TestAddToGroupedMetric(t *testing.T) {

assert.Len(t, groupedMetrics, 1)
for _, v := range groupedMetrics {
assert.Equal(t, len(tc.expectedMetricInfo), len(v.metrics))
assert.Len(t, v.metrics, len(tc.expectedMetricInfo))
assert.Equal(t, tc.expectedMetricInfo, v.metrics)
assert.Len(t, v.labels, 2)
assert.Equal(t, generateTestMetricMetadata(namespace, timestamp, logGroup, logStreamName, instrumentationLibName, tc.expectedMetricType), v.metadata)
Expand Down
10 changes: 5 additions & 5 deletions exporter/awsemfexporter/metric_translator_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -177,7 +177,7 @@ func hashMetricSlice(metricSlice []cWMetricInfo) []string {
// assertDimsEqual asserts whether dimension sets are equal
// (i.e. has same sets of dimensions), regardless of order.
func assertDimsEqual(t *testing.T, expected, actual [][]string) {
assert.Equal(t, len(expected), len(actual))
assert.Len(t, actual, len(expected))
expectedDimensions := normalizeDimensionality(expected)
actualDimensions := normalizeDimensionality(actual)
assert.Equal(t, expectedDimensions, actualDimensions)
Expand Down Expand Up @@ -215,7 +215,7 @@ func assertCWMeasurementEqual(t *testing.T, expected, actual cWMeasurement) {
assert.Equal(t, expected.Namespace, actual.Namespace)

// Check metrics
assert.Equal(t, len(expected.Metrics), len(actual.Metrics))
assert.Len(t, actual.Metrics, len(expected.Metrics))
expectedHashSlice := hashMetricSlice(expected.Metrics)
actualHashSlice := hashMetricSlice(actual.Metrics)
assert.Equal(t, expectedHashSlice, actualHashSlice)
Expand All @@ -226,7 +226,7 @@ func assertCWMeasurementEqual(t *testing.T, expected, actual cWMeasurement) {

// assertCWMeasurementSliceEqual asserts whether CW Measurements are equal, regardless of order.
func assertCWMeasurementSliceEqual(t *testing.T, expected, actual []cWMeasurement) {
assert.Equal(t, len(expected), len(actual))
assert.Len(t, actual, len(expected))
seen := make([]bool, len(expected))
for _, actualMeasurement := range actual {
hasMatch := false
Expand All @@ -246,7 +246,7 @@ func assertCWMeasurementSliceEqual(t *testing.T, expected, actual []cWMeasuremen
func assertCWMetricsEqual(t *testing.T, expected, actual *cWMetrics) {
assert.Equal(t, expected.timestampMs, actual.timestampMs)
assert.Equal(t, expected.fields, actual.fields)
assert.Equal(t, len(expected.measurements), len(actual.measurements))
assert.Len(t, actual.measurements, len(expected.measurements))
assertCWMeasurementSliceEqual(t, expected.measurements, actual.measurements)
}

Expand Down Expand Up @@ -1459,7 +1459,7 @@ func TestGroupedMetricToCWMeasurementsWithFilters(t *testing.T) {

cWMeasurements := groupedMetricToCWMeasurementsWithFilters(groupedMetric, config)
assert.NotNil(t, cWMeasurements)
assert.Equal(t, len(tc.expectedMeasurements), len(cWMeasurements))
assert.Len(t, cWMeasurements, len(tc.expectedMeasurements))
assertCWMeasurementSliceEqual(t, tc.expectedMeasurements, cWMeasurements)
})
}
Expand Down
5 changes: 1 addition & 4 deletions exporter/clickhouseexporter/exporter_logs_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -32,10 +32,7 @@ func TestLogsExporter_New(t *testing.T) {
_ = func(want error) validate {
return func(t *testing.T, exporter *logsExporter, err error) {
require.Nil(t, exporter)
require.Error(t, err)
if !errors.Is(err, want) {
t.Fatalf("Expected error '%v', but got '%v'", want, err)
}
require.ErrorIs(t, err, want, "Expected error '%v', but got '%v'", want, err)
}
}

Expand Down
5 changes: 2 additions & 3 deletions exporter/datadogexporter/internal/logs/sender_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ import (
"github.com/DataDog/datadog-api-client-go/v2/api/datadog"
"github.com/DataDog/datadog-api-client-go/v2/api/datadogV2"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.opentelemetry.io/collector/config/confighttp"
"go.opentelemetry.io/collector/config/configtls"
"go.uber.org/zap/zaptest"
Expand Down Expand Up @@ -192,9 +193,7 @@ func TestSubmitLogs(t *testing.T) {
})
defer server.Close()
s := NewSender(server.URL, logger, confighttp.ClientConfig{Timeout: time.Second * 10, TLSSetting: configtls.ClientConfig{InsecureSkipVerify: true}}, true, "")
if err := s.SubmitLogs(context.Background(), tt.payload); err != nil {
t.Fatal(err)
}
require.NoError(t, s.SubmitLogs(context.Background(), tt.payload))
assert.Equal(t, calls, tt.numRequests)
})
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -74,14 +74,10 @@ func TestSketchSeriesListMarshal(t *testing.T) {
}

b, err := sl.Marshal()
if err != nil {
t.Fatal(err)
}
require.NoError(t, err)

pl := new(gogen.SketchPayload)
if err := pl.Unmarshal(b); err != nil {
t.Fatal(err)
}
require.NoError(t, pl.Unmarshal(b))

require.Len(t, pl.Sketches, len(sl))

Expand Down
7 changes: 3 additions & 4 deletions exporter/datadogexporter/traces_exporter_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -116,11 +116,10 @@ func TestTracesSource(t *testing.T) {
return
}
buf := new(bytes.Buffer)
if _, err := buf.ReadFrom(r.Body); err != nil {
t.Fatalf("Metrics server handler error: %v", err)
}
_, err := buf.ReadFrom(r.Body)
assert.NoError(t, err, "Metrics server handler error: %v", err)
reqs <- buf.Bytes()
_, err := w.Write([]byte("{\"status\": \"ok\"}"))
_, err = w.Write([]byte("{\"status\": \"ok\"}"))
assert.NoError(t, err)
}))
defer metricsServer.Close()
Expand Down
2 changes: 1 addition & 1 deletion exporter/honeycombmarkerexporter/logs_exporter_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ func TestExportMarkers(t *testing.T) {

assert.NoError(t, err)

assert.Equal(t, len(tt.attributeMap), len(decodedBody))
assert.Len(t, decodedBody, len(tt.attributeMap))

for attr := range tt.attributeMap {
assert.Equal(t, tt.attributeMap[attr], decodedBody[attr])
Expand Down
8 changes: 2 additions & 6 deletions exporter/logzioexporter/jsonlog_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -111,10 +111,6 @@ func TestSetTimeStamp(t *testing.T) {
requests := strings.Split(string(decoded), "\n")
require.NoError(t, json.Unmarshal([]byte(requests[0]), &jsonLog))
require.NoError(t, json.Unmarshal([]byte(requests[1]), &jsonLogNoTimestamp))
if jsonLogNoTimestamp["@timestamp"] != nil {
t.Fatalf("did not expect @timestamp")
}
if jsonLog["@timestamp"] == nil {
t.Fatalf("@timestamp does not exist")
}
require.Nil(t, jsonLogNoTimestamp["@timestamp"], "did not expect @timestamp")
require.NotNil(t, jsonLog["@timestamp"], "@timestamp does not exist")
}
29 changes: 8 additions & 21 deletions exporter/logzioexporter/logziospan_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,54 +10,41 @@ import (
"testing"

"github.com/jaegertracing/jaeger/model"
"github.com/stretchr/testify/require"
)

func TestTransformToLogzioSpanBytes(tester *testing.T) {
inStr, err := os.ReadFile("./testdata/span.json")
if err != nil {
tester.Fatalf("error opening sample span file %s", err.Error())
}
require.NoError(tester, err, "error opening sample span file")

var span model.Span
err = json.Unmarshal(inStr, &span)
if err != nil {
fmt.Println("json.Unmarshal")
}
newSpan, err := transformToLogzioSpanBytes(&span)
if err != nil {
tester.Fatal(err.Error())
}
require.NoError(tester, err)
m := make(map[string]any)
err = json.Unmarshal(newSpan, &m)
if err != nil {
tester.Fatal(err.Error())
}
require.NoError(tester, err)
if _, ok := m["JaegerTag"]; !ok {
tester.Error("error converting span to logzioSpan, JaegerTag is not found")
}
}

func TestTransformToDbModelSpan(tester *testing.T) {
inStr, err := os.ReadFile("./testdata/span.json")
if err != nil {
tester.Fatalf("error opening sample span file %s", err.Error())
}
require.NoError(tester, err, "error opening sample span file")
var span model.Span
err = json.Unmarshal(inStr, &span)
if err != nil {
fmt.Println("json.Unmarshal")
}
newSpan, err := transformToLogzioSpanBytes(&span)
if err != nil {
tester.Fatal(err.Error())
}
require.NoError(tester, err)
var testLogzioSpan logzioSpan
err = json.Unmarshal(newSpan, &testLogzioSpan)
if err != nil {
tester.Fatal(err.Error())
}
require.NoError(tester, err)
dbModelSpan := testLogzioSpan.transformToDbModelSpan()
if len(dbModelSpan.References) != 3 {
tester.Fatalf("Error converting logzio span to dbmodel span")
}
require.Len(tester, dbModelSpan.References, 3, "Error converting logzio span to dbmodel span")
}
2 changes: 1 addition & 1 deletion exporter/lokiexporter/exporter_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -274,7 +274,7 @@ func TestLogsToLokiRequestWithGroupingByTenant(t *testing.T) {

// actualPushRequest is populated within the test http server, we check it here as assertions are better done at the
// end of the test function
assert.Equal(t, len(actualPushRequestPerTenant), len(tC.expected))
assert.Len(t, tC.expected, len(actualPushRequestPerTenant))
for tenant, request := range actualPushRequestPerTenant {
pr, ok := tC.expected[tenant]
assert.True(t, ok)
Expand Down
4 changes: 1 addition & 3 deletions exporter/mezmoexporter/exporter_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -122,9 +122,7 @@ type (
func createHTTPServer(params *testServerParams) testServer {
httpServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
body, err := io.ReadAll(r.Body)
if err != nil {
params.t.Fatal(err)
}
assert.NoError(params.t, err)

var logBody mezmoLogBody
if err = json.Unmarshal(body, &logBody); err != nil {
Expand Down
5 changes: 2 additions & 3 deletions exporter/otelarrowexporter/internal/arrow/exporter_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -922,9 +922,8 @@ func benchmarkPrioritizer(b *testing.B, numStreams int, pname PrioritizerName) {

wg.Add(1)
defer func() {
if err := tc.exporter.Shutdown(bg); err != nil {
b.Errorf("shutdown failed: %v", err)
}
err := tc.exporter.Shutdown(bg)
assert.NoError(b, err, "shutdown failed")
wg.Done()
wg.Wait()
}()
Expand Down
2 changes: 1 addition & 1 deletion exporter/prometheusexporter/collector_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -689,7 +689,7 @@ func TestAccumulateHistograms(t *testing.T) {
h := pbMetric.Histogram
require.Equal(t, tt.histogramCount, h.GetSampleCount())
require.Equal(t, tt.histogramSum, h.GetSampleSum())
require.Equal(t, len(tt.histogramPoints), len(h.Bucket))
require.Len(t, h.Bucket, len(tt.histogramPoints))

for _, b := range h.Bucket {
require.Equal(t, tt.histogramPoints[(*b).GetUpperBound()], b.GetCumulativeCount())
Expand Down
36 changes: 9 additions & 27 deletions exporter/prometheusexporter/end_to_end_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -48,9 +48,7 @@ func TestEndToEndSummarySupport(t *testing.T) {
defer dropWizardServer.Close()

srvURL, err := url.Parse(dropWizardServer.URL)
if err != nil {
t.Fatal(err)
}
require.NoError(t, err)

ctx, cancel := context.WithCancel(context.Background())
defer cancel()
Expand All @@ -67,12 +65,8 @@ func TestEndToEndSummarySupport(t *testing.T) {
exporterFactory := NewFactory()
set := exportertest.NewNopSettings()
exporter, err := exporterFactory.CreateMetrics(ctx, set, exporterCfg)
if err != nil {
t.Fatal(err)
}
if err = exporter.Start(ctx, nil); err != nil {
t.Fatalf("Failed to start the Prometheus exporter: %v", err)
}
require.NoError(t, err)
require.NoError(t, exporter.Start(ctx, nil), "Failed to start the Prometheus exporter: %v", err)
t.Cleanup(func() { require.NoError(t, exporter.Shutdown(ctx)) })

// 3. Create the Prometheus receiver scraping from the DropWizard mock server and
Expand All @@ -89,9 +83,7 @@ func TestEndToEndSummarySupport(t *testing.T) {
- targets: ['%s']
`, srvURL.Host))
receiverConfig := new(prometheusreceiver.PromConfig)
if err = yaml.Unmarshal(yamlConfig, receiverConfig); err != nil {
t.Fatal(err)
}
require.NoError(t, yaml.Unmarshal(yamlConfig, receiverConfig))

receiverFactory := prometheusreceiver.NewFactory()
receiverCreateSet := receivertest.NewNopSettings()
Expand All @@ -100,26 +92,18 @@ func TestEndToEndSummarySupport(t *testing.T) {
}
// 3.5 Create the Prometheus receiver and pass in the previously created Prometheus exporter.
prometheusReceiver, err := receiverFactory.CreateMetrics(ctx, receiverCreateSet, rcvCfg, exporter)
if err != nil {
t.Fatal(err)
}
if err = prometheusReceiver.Start(ctx, nil); err != nil {
t.Fatalf("Failed to start the Prometheus receiver: %v", err)
}
require.NoError(t, err)
require.NoError(t, prometheusReceiver.Start(ctx, nil), "Failed to start the Prometheus receiver: %v", err)
t.Cleanup(func() { require.NoError(t, prometheusReceiver.Shutdown(ctx)) })

// 4. Scrape from the Prometheus receiver to ensure that we export summary metrics
wg.Wait()

res, err := http.Get("http://" + exporterCfg.Endpoint + "/metrics")
if err != nil {
t.Fatalf("Failed to scrape from the exporter: %v", err)
}
require.NoError(t, err, "Failed to scrape from the exporter: %v", err)
prometheusExporterScrape, err := io.ReadAll(res.Body)
res.Body.Close()
if err != nil {
t.Fatal(err)
}
require.NoError(t, err)

// 5. Verify that we have the summary metrics and that their values make sense.
wantLineRegexps := []string{
Expand Down Expand Up @@ -171,9 +155,7 @@ func TestEndToEndSummarySupport(t *testing.T) {
// After this replacement, there should ONLY be newlines present.
prometheusExporterScrape = bytes.ReplaceAll(prometheusExporterScrape, []byte("\n"), []byte(""))
// Now assert that NO output was left over.
if len(prometheusExporterScrape) != 0 {
t.Fatalf("Left-over unmatched Prometheus scrape content: %q\n", prometheusExporterScrape)
}
require.Empty(t, prometheusExporterScrape, "Left-over unmatched Prometheus scrape content: %q\n", prometheusExporterScrape)
}

// the following triggers G101: Potential hardcoded credentials
Expand Down
Loading

0 comments on commit bbdd281

Please sign in to comment.