diff --git a/CHANGELOG.md b/CHANGELOG.md index df6d49696..4a3ef4057 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -41,6 +41,7 @@ the list is now fixed inside the packaged logprep * fix `_get_value` in `FilterExpression` so that keys don't match on values * fix `auto_rule_tester` to work with `LOGPREP_BYPASS_RULE_TREE` enabled * fix `opensearch_output` not draining `message_backlog` on shutdown +* silence `FieldExists` warning in metrics when `LOGPREP_APPEND_MEASUREMENT_TO_EVENT` is active ## 14.0.0 ### Breaking diff --git a/logprep/metrics/metrics.py b/logprep/metrics/metrics.py index c89c6621c..28a8b175d 100644 --- a/logprep/metrics/metrics.py +++ b/logprep/metrics/metrics.py @@ -117,14 +117,14 @@ import os import time +from _socket import gethostname from abc import ABC, abstractmethod from typing import Any, Union -from _socket import gethostname from attrs import define, field, validators from prometheus_client import CollectorRegistry, Counter, Gauge, Histogram -from logprep.util.helper import add_fields_to +from logprep.util.helper import _add_field_to_silent_fail @define(kw_only=True, slots=False) @@ -222,14 +222,22 @@ def inner(self, *args, **kwargs): # nosemgrep if hasattr(self, "rule_type"): event = args[0] if event: - add_fields_to( - event, fields={f"processing_times.{self.rule_type}": duration} + _add_field_to_silent_fail( + event=event, + field=(f"processing_times.{self.rule_type}", duration), + rule=None, ) if hasattr(self, "_logprep_config"): # attribute of the Pipeline class event = args[0] if event: - add_fields_to(event, fields={"processing_times.pipeline": duration}) - add_fields_to(event, fields={"processing_times.hostname": gethostname()}) + _add_field_to_silent_fail( + event=event, field=("processing_times.pipeline", duration), rule=None + ) + _add_field_to_silent_fail( + event=event, + field=("processing_times.hostname", gethostname()), + rule=None, + ) return result return inner diff --git a/tests/unit/metrics/test_metrics.py 
b/tests/unit/metrics/test_metrics.py index 0ce5a2e85..250279ab5 100644 --- a/tests/unit/metrics/test_metrics.py +++ b/tests/unit/metrics/test_metrics.py @@ -349,10 +349,6 @@ def test_no_label_values_injection(self): metrics_output = generate_latest(self.metrics.custom_registry).decode("utf-8") assert "test_metric_without_label_values" not in metrics_output - @Metric.measure_time(metric_name="test_metric_histogram") - def decorated_function(self): - pass - @mock.patch.dict("os.environ", {"LOGPREP_APPEND_MEASUREMENT_TO_EVENT": "1"}, clear=True) def test_measure_time_measures_and_appends_processing_times_but_not_hostname(self): @Metric.measure_time(metric_name="test_metric_histogram") @@ -378,6 +374,36 @@ def decorated_function_append(self, document): assert "test_rule" in document.get("processing_times") assert document.get("processing_times").get("test_rule") > 0 + @mock.patch.dict("os.environ", {"LOGPREP_APPEND_MEASUREMENT_TO_EVENT": "1"}, clear=True) + def test_measure_time_measures_and_appends_processing_times_two_times(self): + # simulates consecutive calls from processors that appear two times in + # the pipeline, more precisely, two of the same rule_types appear in the + # pipeline + + @Metric.measure_time(metric_name="test_metric_histogram") + def decorated_function_append(self, document): + pass + + metric_output = generate_latest(self.metrics.custom_registry).decode("utf-8") + assert re.search(r"test_metric_histogram_sum.* 0\.0", metric_output) + assert re.search(r"test_metric_histogram_count.* 0\.0", metric_output) + assert re.search(r"test_metric_histogram_bucket.* 0\.0", metric_output) + document = {"test": "event"} + decorated_function_append(self, document) + decorated_function_append(self, document) + + metric_output = generate_latest(self.metrics.custom_registry).decode("utf-8") + assert not re.search(r"test_metric_histogram_sum.* 0\.0", metric_output) + assert re.search(r"test_metric_histogram_count.* 2\.0", metric_output) + assert 
re.search(r"test_metric_histogram_bucket.* 2\.0", metric_output) + assert not re.search( + r"test_metric_histogram_bucket.* 3\.0", metric_output + ) # regex is greedy + assert "processing_times" in document + assert not "hostname" in document.get("processing_times") # is only set by the pipeline + assert "test_rule" in document.get("processing_times") + assert document.get("processing_times").get("test_rule") > 0 + @mock.patch("logprep.metrics.metrics.gethostname", return_value="testhost") @mock.patch.dict("os.environ", {"LOGPREP_APPEND_MEASUREMENT_TO_EVENT": "1"}, clear=True) def test_measure_time_measures_and_appends_pipeline_processing_times_and_hostname( @@ -412,11 +438,15 @@ def decorated_function_append(self, document): mock_gethostname.assert_called_once() def test_measure_time_measures(self): + @Metric.measure_time(metric_name="test_metric_histogram") + def decorated_function(self): + pass + metric_output = generate_latest(self.metrics.custom_registry).decode("utf-8") assert re.search(r"test_metric_histogram_sum.* 0\.0", metric_output) assert re.search(r"test_metric_histogram_count.* 0\.0", metric_output) assert re.search(r"test_metric_histogram_bucket.* 0\.0", metric_output) - self.decorated_function() + decorated_function(self) metric_output = generate_latest(self.metrics.custom_registry).decode("utf-8") assert not re.search(r"test_metric_histogram_sum.* 0\.0", metric_output) @@ -450,5 +480,5 @@ def decorated_function_append(self, document): r"test_metric_histogram_bucket.* 2\.0", metric_output ) # regex is greedy assert document == {} - # assert call on time.perf_counter to ensure that correct decorator was accessed + # assert call on time.perf_counter to ensure that the correct decorator was accessed mock_perf_counter.assert_called()