diff --git a/ack_backend/src/ack_processor.py b/ack_backend/src/ack_processor.py
index 4f3c4637..1d6ce669 100644
--- a/ack_backend/src/ack_processor.py
+++ b/ack_backend/src/ack_processor.py
@@ -1,5 +1,7 @@
+"""Ack lambda handler"""
+
 import json
-from log_structure_splunk import ack_function_info, convert_messsage_to_ack_row_logging_decorator
+from logging_decorators import ack_lambda_handler_logging_decorator, convert_messsage_to_ack_row_logging_decorator
 from update_ack_file import update_ack_file, create_ack_data
 
 
@@ -29,7 +31,7 @@ def convert_message_to_ack_row(message, expected_file_key, expected_created_at_f
     )
 
 
-@ack_function_info
+@ack_lambda_handler_logging_decorator
 def lambda_handler(event, context):
 
     if not event.get("Records"):
diff --git a/ack_backend/src/log_structure_splunk.py b/ack_backend/src/logging_decorators.py
similarity index 59%
rename from ack_backend/src/log_structure_splunk.py
rename to ack_backend/src/logging_decorators.py
index ddfec7b5..10409c07 100644
--- a/ack_backend/src/log_structure_splunk.py
+++ b/ack_backend/src/logging_decorators.py
@@ -1,3 +1,5 @@
+"""Decorators for logging and sending logs to Firehose"""
+
 import os
 import json
 import time
@@ -14,12 +16,23 @@ def send_log_to_firehose(log_data: dict) -> None:
     """Sends the log_message to Firehose"""
     try:
         record = {"Data": json.dumps({"event": log_data}).encode("utf-8")}
-        response = firehose_client.put_record(DeliveryStreamName=STREAM_NAME, Record=record)
-        logger.info("Log sent to Firehose: %s", response)  # TODO: Should we be logging full response?
+        firehose_client.put_record(DeliveryStreamName=STREAM_NAME, Record=record)
+        logger.info("Log sent to Firehose")
     except Exception as error:  # pylint:disable = broad-exception-caught
         logger.exception("Error sending log to Firehose: %s", error)
 
 
+def generate_and_send_logs(
+    start_time, base_log_data: dict, additional_log_data: dict, is_error_log: bool = False
+) -> None:
+    """Builds the log data from base_log_data, additional_log_data and the time taken (measured from the given
+    start_time), then sends it to CloudWatch and Firehose."""
+    log_data = {**base_log_data, "time_taken": f"{round(time.time() - start_time, 5)}s", **additional_log_data}
+    log_function = logger.error if is_error_log else logger.info
+    log_function(json.dumps(log_data))
+    send_log_to_firehose(log_data)
+
+
 def convert_messsage_to_ack_row_logging_decorator(func):
     """This decorator logs the information on the conversion of a single message to an ack data row"""
 
@@ -36,45 +49,30 @@ def wrapper(message, expected_file_key, expected_created_at_formatted_string):
             message_id = message.get("row_id", "unknown")
             diagnostics = message.get("diagnostics")
 
-            diagnostics_result = process_diagnostics(diagnostics, file_key, message_id)
-
-            log_data = {
-                **base_log_data,
+            additional_log_data = {
                 "file_key": file_key,
-                "message_id": message.get("row_id", "unknown"),
+                "message_id": message_id,
                 "vaccine_type": message.get("vaccine_type", "unknown"),
                 "supplier": message.get("supplier", "unknown"),
                 "local_id": message.get("local_id", "unknown"),
                 "operation_requested": message.get("action_flag", "unknown"),
-                "time_taken": f"{round(time.time() - start_time, 5)}s",
-                **diagnostics_result,
+                **process_diagnostics(diagnostics, file_key, message_id),
             }
-
-            try:
-                logger.info(f"Function executed successfully: {json.dumps(log_data)}")
-                send_log_to_firehose(log_data)
-            except Exception:
-                logger.warning("Issue with logging")
+            generate_and_send_logs(start_time, base_log_data, additional_log_data)
 
             return result
 
-        except Exception as e:
-            log_data = {
-                "status": "fail",
-                "statusCode": 500,
-                "diagnostics": f"Error converting message to ack row: {str(e)}",
-                "date_time": str(datetime.now()),
-                "error_source": "convert_message_to_ack_row",
-            }
-            send_log_to_firehose(log_data)
+        except Exception as error:
+            additional_log_data = {"status": "fail", "statusCode": 500, "diagnostics": str(error)}
+            generate_and_send_logs(start_time, base_log_data, additional_log_data, is_error_log=True)
             raise
 
     return wrapper
 
 
-def ack_function_info(func):
-    """This decorator logs the execution info for the decorated function and sends it to Splunk."""
+def ack_lambda_handler_logging_decorator(func):
+    """This decorator logs the execution info for the ack lambda handler."""
 
     @wraps(func)
     def wrapper(event, context, *args, **kwargs):
@@ -84,22 +82,15 @@ def wrapper(event, context, *args, **kwargs):
 
         try:
             result = func(event, context, *args, **kwargs)
+
+            additional_log_data = {"statusCode": 200, "message": "Lambda function executed successfully!"}
+            generate_and_send_logs(start_time, base_log_data, additional_log_data)
+
             return result
-        except Exception as e:
-            end_time = time.time()
-            log_data = {
-                **base_log_data,
-                "time_taken": f"{round(end_time - start_time, 5)}s",
-                "status": "fail",
-                "statusCode": 500,
-                "diagnostics": f"Error in ack_processor_{func.__name__}: {str(e)}",
-            }
-            try:
-                logger.exception(f"Critical error in function: logging for {func.__name__}")
-                send_log_to_firehose(log_data)
-            except Exception:
-                logger.warning("Issue with logging")
+        except Exception as error:
+            additional_log_data = {"status": "fail", "statusCode": 500, "diagnostics": str(error)}
+            generate_and_send_logs(start_time, base_log_data, additional_log_data, is_error_log=True)
             raise
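Reviewer note: the substance of the src changes is that both decorators now funnel success and failure logging through the shared generate_and_send_logs helper, so every log line carries the same base fields plus a time_taken stamp, and error logs differ only by is_error_log=True. (The pre-existing triple-s typo in convert_messsage_to_ack_row_logging_decorator is kept, since renaming it would touch every call site.) Below is a minimal self-contained sketch of the pattern; the print calls stand in for the CloudWatch logger and the boto3 Firehose client, and the base_log_data field names are assumptions rather than something this diff shows:

    import json
    import time
    from datetime import datetime
    from functools import wraps

    def send_log_to_firehose(log_data: dict) -> None:
        # Stand-in for the real boto3 firehose put_record call.
        print("firehose:", json.dumps({"event": log_data}))

    def generate_and_send_logs(start_time, base_log_data, additional_log_data, is_error_log=False):
        # Single shared path: merge the fields, stamp the elapsed time,
        # then emit to CloudWatch (here: print) and Firehose.
        log_data = {**base_log_data, "time_taken": f"{round(time.time() - start_time, 5)}s", **additional_log_data}
        print("ERROR" if is_error_log else "INFO", json.dumps(log_data))
        send_log_to_firehose(log_data)

    def ack_lambda_handler_logging_decorator(func):
        @wraps(func)
        def wrapper(event, context, *args, **kwargs):
            # Illustrative base fields only; the diff does not show base_log_data.
            base_log_data = {"function_name": func.__name__, "date_time": str(datetime.now())}
            start_time = time.time()
            try:
                result = func(event, context, *args, **kwargs)
                generate_and_send_logs(
                    start_time, base_log_data, {"statusCode": 200, "message": "Lambda function executed successfully!"}
                )
                return result
            except Exception as error:
                generate_and_send_logs(
                    start_time, base_log_data,
                    {"status": "fail", "statusCode": 500, "diagnostics": str(error)},
                    is_error_log=True,
                )
                raise
        return wrapper

    @ack_lambda_handler_logging_decorator
    def lambda_handler(event, context):
        if not event.get("Records"):
            raise ValueError("No records found in the event")
        return {"statusCode": 200}

    lambda_handler({"Records": [{}]}, None)  # success path: one INFO log, one Firehose record

Re-raising after the error log keeps the Lambda's normal failure and retry behaviour while still guaranteeing exactly one structured log per invocation.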
diff --git a/ack_backend/tests/test_ack_processor.py b/ack_backend/tests/test_ack_processor.py
index 96a572a6..de777d8f 100644
--- a/ack_backend/tests/test_ack_processor.py
+++ b/ack_backend/tests/test_ack_processor.py
@@ -144,7 +144,7 @@ def environment_setup(self, ack_file_name, existing_content):
 
         s3_client.put_object(Bucket=DESTINATION_BUCKET_NAME, Key=ack_file_name, Body=existing_content)
 
-    @patch("log_structure_splunk.send_log_to_firehose")
+    @patch("logging_decorators.send_log_to_firehose")
     def test_lambda_handler_main(self, mock_send_log_to_firehose):
         """Test lambda handler with dynamic ack_file_name and consistent row_template."""
         test_bucket_name = "immunisation-batch-internal-testlambda-data-destinations"
@@ -207,7 +207,7 @@ def test_lambda_handler_main(self, mock_send_log_to_firehose):
 
             s3_client.delete_object(Bucket=test_bucket_name, Key=file_info["ack_file_name"])
 
-    @patch("log_structure_splunk.send_log_to_firehose")
+    @patch("logging_decorators.send_log_to_firehose")
     def test_lambda_handler_existing(self, mock_send_log_to_firehose):
         """Test lambda handler with dynamic ack_file_name and consistent row_template with an already existing ack
         file with content."""
@@ -472,7 +472,7 @@ def test_obtain_current_ack_content_file_exists(self):
 
         s3_client.delete_object(Bucket=DESTINATION_BUCKET_NAME, Key=ack_file_key)
 
-    @patch("log_structure_splunk.send_log_to_firehose")
+    @patch("logging_decorators.send_log_to_firehose")
     @patch("update_ack_file.create_ack_data")
     @patch("update_ack_file.update_ack_file")
     def test_lambda_handler_error_scenarios(
@@ -485,16 +485,14 @@ def test_lambda_handler_error_scenarios(
 
         mock_send_log_to_firehose.assert_called()
         error_log = mock_send_log_to_firehose.call_args[0][0]
-        self.assertIn(
-            "Error in ack_processor_lambda_handler: No records found in the event", error_log["diagnostics"]
-        )
+        self.assertIn("No records found in the event", error_log["diagnostics"])
 
         mock_send_log_to_firehose.reset_mock()
 
         test_cases = [
             {
                 "description": "Malformed JSON in SQS body",
                 "event": {"Records": [{""}]},
-                "expected_message": "Error in ack_processor_lambda_handler: Could not load incoming message body",
+                "expected_message": "Could not load incoming message body",
             },
             {
                 "description": "Invalid value in 'diagnostics' field",
                 "event": {
                     "Records": [
                         {
                         }
                     ]
                 },
-                "expected_message": "Error in ack_processor_lambda_handler: Diagnostics must be either None or a string",
+                "expected_message": "Diagnostics must be either None or a string",
             },
         ]
         # TODO: What was below meant to be testing?
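The test updates in test_ack_processor.py are mechanical but easy to get wrong: unittest.mock resolves the patch target in the module where the name is looked up at call time, so after the rename every @patch("log_structure_splunk.send_log_to_firehose") has to become @patch("logging_decorators.send_log_to_firehose"), otherwise the real Firehose call would run during tests. The assertion changes follow from the handler decorator now logging the bare str(error) instead of prefixing it with "Error in ack_processor_lambda_handler: ". A hypothetical minimal check of the new target (assumes logging_decorators imports cleanly outside AWS, e.g. with stubbed environment variables):

    from unittest.mock import patch

    import logging_decorators

    # Patch the name in the namespace it is called from, not where it was
    # originally defined; "log_structure_splunk..." no longer resolves.
    with patch("logging_decorators.send_log_to_firehose") as mock_send:
        logging_decorators.generate_and_send_logs(
            start_time=0.0,
            base_log_data={"function_name": "demo"},  # illustrative fields only
            additional_log_data={"statusCode": 200},
        )
        mock_send.assert_called_once()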
"Error in ack_processor_lambda_handler: No records found in the event", error_log["diagnostics"] - ) + self.assertIn("No records found in the event", error_log["diagnostics"]) mock_send_log_to_firehose.reset_mock() test_cases = [ { "description": "Malformed JSON in SQS body", "event": {"Records": [{""}]}, - "expected_message": "Error in ack_processor_lambda_handler: Could not load incoming message body", + "expected_message": "Could not load incoming message body", }, { "description": "Invalid value in 'diagnostics' field", @@ -516,7 +514,7 @@ def test_lambda_handler_error_scenarios( } ] }, - "expected_message": "Error in ack_processor_lambda_handler: Diagnostics must be either None or a string", + "expected_message": "Diagnostics must be either None or a string", }, ] # TODO: What was below meant to be testing? diff --git a/ack_backend/tests/test_splunk_logging.py b/ack_backend/tests/test_splunk_logging.py index aef0b181..969315b5 100644 --- a/ack_backend/tests/test_splunk_logging.py +++ b/ack_backend/tests/test_splunk_logging.py @@ -34,9 +34,9 @@ def extract_log_json(self, log_entry): @patch("update_ack_file.obtain_current_ack_content") @patch("ack_processor.create_ack_data") @patch("ack_processor.update_ack_file") - @patch("log_structure_splunk.send_log_to_firehose") + @patch("logging_decorators.send_log_to_firehose") @patch("time.time") - @patch("log_structure_splunk.datetime") + @patch("logging_decorators.datetime") def test_splunk_logging_successful_rows( self, mock_datetime, @@ -88,9 +88,9 @@ def test_splunk_logging_successful_rows( @patch("update_ack_file.obtain_current_ack_content") @patch("ack_processor.create_ack_data") @patch("ack_processor.update_ack_file") - @patch("log_structure_splunk.send_log_to_firehose") + @patch("logging_decorators.send_log_to_firehose") @patch("time.time") - @patch("log_structure_splunk.datetime") + @patch("logging_decorators.datetime") def test_splunk_logging_missing_data( self, mock_datetime, @@ -133,9 +133,9 @@ def test_splunk_logging_missing_data( @patch("update_ack_file.obtain_current_ack_content") @patch("ack_processor.create_ack_data") @patch("ack_processor.update_ack_file") - @patch("log_structure_splunk.send_log_to_firehose") + @patch("logging_decorators.send_log_to_firehose") @patch("time.time") - @patch("log_structure_splunk.datetime") + @patch("logging_decorators.datetime") def test_splunk_logging_statuscode_diagnostics( self, mock_datetime, @@ -194,9 +194,9 @@ def test_splunk_logging_statuscode_diagnostics( @patch("update_ack_file.obtain_current_ack_content") @patch("ack_processor.create_ack_data") @patch("ack_processor.update_ack_file") - @patch("log_structure_splunk.send_log_to_firehose") + @patch("logging_decorators.send_log_to_firehose") @patch("time.time") - @patch("log_structure_splunk.datetime") + @patch("logging_decorators.datetime") def test_splunk_logging_multiple_rows( self, mock_datetime, @@ -242,9 +242,9 @@ def test_splunk_logging_multiple_rows( @patch("update_ack_file.obtain_current_ack_content") @patch("ack_processor.create_ack_data") @patch("ack_processor.update_ack_file") - @patch("log_structure_splunk.send_log_to_firehose") + @patch("logging_decorators.send_log_to_firehose") @patch("time.time") - @patch("log_structure_splunk.datetime") + @patch("logging_decorators.datetime") def test_splunk_logging_multiple_with_diagnostics( self, mock_datetime,