restructure ack logging
AlexandraBenson committed Jan 7, 2025
1 parent a54da2a commit f5f2b8b
Showing 4 changed files with 51 additions and 60 deletions.
6 changes: 4 additions & 2 deletions ack_backend/src/ack_processor.py
@@ -1,5 +1,7 @@
"""Ack lambda handler"""

import json
from log_structure_splunk import ack_function_info, convert_messsage_to_ack_row_logging_decorator
from logging_decorators import ack_lambda_handler_logging_decorator, convert_messsage_to_ack_row_logging_decorator
from update_ack_file import update_ack_file, create_ack_data


@@ -29,7 +31,7 @@ def convert_message_to_ack_row(message, expected_file_key, expected_created_at_f
)


@ack_function_info
@ack_lambda_handler_logging_decorator
def lambda_handler(event, context):

if not event.get("Records"):
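Note: with the renamed ack_lambda_handler_logging_decorator in place, any exception raised inside the handler (such as the "no Records" guard above) is turned into a failure log record and re-raised rather than swallowed. A rough sketch of the record that would be emitted for an empty event; all values other than status, statusCode and diagnostics are assumptions, since base_log_data is built before the hunks shown in this diff:

# Illustrative only; field values marked as assumed are not taken from the commit.
failure_log = {
    "function_name": "lambda_handler",               # assumed base_log_data key
    "time_taken": "0.00042s",                        # added by generate_and_send_logs (see below)
    "status": "fail",
    "statusCode": 500,
    "diagnostics": "No records found in the event",  # message asserted in the error-scenario test
}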
71 changes: 31 additions & 40 deletions ack_backend/src/logging_decorators.py
@@ -1,3 +1,5 @@
"""Decorators for logging and sending logs to Firehose"""

import os
import json
import time
@@ -14,12 +16,23 @@ def send_log_to_firehose(log_data: dict) -> None:
"""Sends the log_message to Firehose"""
try:
record = {"Data": json.dumps({"event": log_data}).encode("utf-8")}
response = firehose_client.put_record(DeliveryStreamName=STREAM_NAME, Record=record)
logger.info("Log sent to Firehose: %s", response) # TODO: Should we be logging full response?
firehose_client.put_record(DeliveryStreamName=STREAM_NAME, Record=record)
logger.info("Log sent to Firehose")
except Exception as error: # pylint:disable = broad-exception-caught
logger.exception("Error sending log to Firehose: %s", error)


def generate_and_send_logs(
start_time, base_log_data: dict, additional_log_data: dict, is_error_log: bool = False
) -> None:
"""Generates log data which includes the base_log_data, additional_log_data, and time taken (calculated using the
current time and given start_time) and sends them to Cloudwatch and Firehose."""
log_data = {**base_log_data, "time_taken": f"{round(time.time() - start_time, 5)}s", **additional_log_data}
log_function = logger.error if is_error_log else logger.info
log_function(json.dumps(log_data))
send_log_to_firehose(log_data)


def convert_messsage_to_ack_row_logging_decorator(func):
"""This decorator logs the information on the conversion of a single message to an ack data row"""

@@ -36,45 +49,30 @@ def wrapper(message, expected_file_key, expected_created_at_formatted_string):
message_id = message.get("row_id", "unknown")
diagnostics = message.get("diagnostics")

diagnostics_result = process_diagnostics(diagnostics, file_key, message_id)

log_data = {
**base_log_data,
additional_log_data = {
"file_key": file_key,
"message_id": message.get("row_id", "unknown"),
"message_id": message_id,
"vaccine_type": message.get("vaccine_type", "unknown"),
"supplier": message.get("supplier", "unknown"),
"local_id": message.get("local_id", "unknown"),
"operation_requested": message.get("action_flag", "unknown"),
"time_taken": f"{round(time.time() - start_time, 5)}s",
**diagnostics_result,
**process_diagnostics(diagnostics, file_key, message_id),
}

try:
logger.info(f"Function executed successfully: {json.dumps(log_data)}")
send_log_to_firehose(log_data)
except Exception:
logger.warning("Issue with logging")
generate_and_send_logs(start_time, base_log_data, additional_log_data)

return result

except Exception as e:
log_data = {
"status": "fail",
"statusCode": 500,
"diagnostics": f"Error converting message to ack row: {str(e)}",
"date_time": str(datetime.now()),
"error_source": "convert_message_to_ack_row",
}
send_log_to_firehose(log_data)
except Exception as error:
additional_log_data = {"status": "fail", "statusCode": 500, "diagnostics": str(error)}
generate_and_send_logs(start_time, base_log_data, additional_log_data, is_error_log=True)

raise

return wrapper


def ack_function_info(func):
"""This decorator logs the execution info for the decorated function and sends it to Splunk."""
def ack_lambda_handler_logging_decorator(func):
"""This decorator logs the execution info for the ack lambda handler."""

@wraps(func)
def wrapper(event, context, *args, **kwargs):
@@ -84,22 +82,15 @@ def wrapper(event, context, *args, **kwargs):

try:
result = func(event, context, *args, **kwargs)

additional_log_data = {"statusCode": 200, "message": "Lambda function executed successfully!"}
generate_and_send_logs(start_time, base_log_data, additional_log_data)

return result

except Exception as e:
end_time = time.time()
log_data = {
**base_log_data,
"time_taken": f"{round(end_time - start_time, 5)}s",
"status": "fail",
"statusCode": 500,
"diagnostics": f"Error in ack_processor_{func.__name__}: {str(e)}",
}
try:
logger.exception(f"Critical error in function: logging for {func.__name__}")
send_log_to_firehose(log_data)
except Exception:
logger.warning("Issue with logging")
except Exception as error:
additional_log_data = {"status": "fail", "statusCode": 500, "diagnostics": str(error)}
generate_and_send_logs(start_time, base_log_data, additional_log_data, is_error_log=True)

raise

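Note: generate_and_send_logs is the centrepiece of the restructure — both decorators now just assemble a small additional_log_data dict and hand the merging, timing, CloudWatch logging and Firehose send to this one helper. A minimal sketch of a direct call, assuming the Firehose client and STREAM_NAME are configured; in the committed code it is only invoked from the two decorators:

import time
from logging_decorators import generate_and_send_logs

start_time = time.time()
# ... work being timed happens here ...
generate_and_send_logs(
    start_time,
    base_log_data={"function_name": "lambda_handler"},  # assumed keys; built inside the decorator
    additional_log_data={"statusCode": 200, "message": "Lambda function executed successfully!"},
)
# Emits one merged JSON record via logger.info and attempts a Firehose put_record.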
14 changes: 6 additions & 8 deletions ack_backend/tests/test_ack_processor.py
Original file line number Diff line number Diff line change
@@ -144,7 +144,7 @@ def environment_setup(self, ack_file_name, existing_content):

s3_client.put_object(Bucket=DESTINATION_BUCKET_NAME, Key=ack_file_name, Body=existing_content)

@patch("log_structure_splunk.send_log_to_firehose")
@patch("logging_decorators.send_log_to_firehose")
def test_lambda_handler_main(self, mock_send_log_to_firehose):
"""Test lambda handler with dynamic ack_file_name and consistent row_template."""
test_bucket_name = "immunisation-batch-internal-testlambda-data-destinations"
@@ -207,7 +207,7 @@ def test_lambda_handler_main(self, mock_send_log_to_firehose):

s3_client.delete_object(Bucket=test_bucket_name, Key=file_info["ack_file_name"])

@patch("log_structure_splunk.send_log_to_firehose")
@patch("logging_decorators.send_log_to_firehose")
def test_lambda_handler_existing(self, mock_send_log_to_firehose):
"""Test lambda handler with dynamic ack_file_name and consistent row_template with an already existing
ack file with content."""
@@ -472,7 +472,7 @@ def test_obtain_current_ack_content_file_exists(self):

s3_client.delete_object(Bucket=DESTINATION_BUCKET_NAME, Key=ack_file_key)

@patch("log_structure_splunk.send_log_to_firehose")
@patch("logging_decorators.send_log_to_firehose")
@patch("update_ack_file.create_ack_data")
@patch("update_ack_file.update_ack_file")
def test_lambda_handler_error_scenarios(
@@ -485,16 +485,14 @@ def test_lambda_handler_error_scenarios(

mock_send_log_to_firehose.assert_called()
error_log = mock_send_log_to_firehose.call_args[0][0]
self.assertIn(
"Error in ack_processor_lambda_handler: No records found in the event", error_log["diagnostics"]
)
self.assertIn("No records found in the event", error_log["diagnostics"])
mock_send_log_to_firehose.reset_mock()

test_cases = [
{
"description": "Malformed JSON in SQS body",
"event": {"Records": [{""}]},
"expected_message": "Error in ack_processor_lambda_handler: Could not load incoming message body",
"expected_message": "Could not load incoming message body",
},
{
"description": "Invalid value in 'diagnostics' field",
Expand All @@ -516,7 +514,7 @@ def test_lambda_handler_error_scenarios(
}
]
},
"expected_message": "Error in ack_processor_lambda_handler: Diagnostics must be either None or a string",
"expected_message": "Diagnostics must be either None or a string",
},
]
# TODO: What was below meant to be testing?
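Note: the patch targets above move from log_structure_splunk.send_log_to_firehose to logging_decorators.send_log_to_firehose because unittest.mock must patch a name where it is looked up, and the decorators (and their call to send_log_to_firehose) now live in logging_decorators. A minimal sketch of the pattern, assuming the handler raises on an event with no Records and that a None context is acceptable; the real tests rely on class setup and mocked S3 helpers not shown here:

from unittest.mock import patch
from ack_processor import lambda_handler

with patch("logging_decorators.send_log_to_firehose") as mock_send:
    try:
        lambda_handler({}, None)  # no Records -> handler raises, decorator logs the failure
    except Exception:
        pass

mock_send.assert_called()
error_log = mock_send.call_args[0][0]
assert "No records found in the event" in error_log["diagnostics"]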
20 changes: 10 additions & 10 deletions ack_backend/tests/test_splunk_logging.py
Original file line number Diff line number Diff line change
@@ -34,9 +34,9 @@ def extract_log_json(self, log_entry):
@patch("update_ack_file.obtain_current_ack_content")
@patch("ack_processor.create_ack_data")
@patch("ack_processor.update_ack_file")
@patch("log_structure_splunk.send_log_to_firehose")
@patch("logging_decorators.send_log_to_firehose")
@patch("time.time")
@patch("log_structure_splunk.datetime")
@patch("logging_decorators.datetime")
def test_splunk_logging_successful_rows(
self,
mock_datetime,
@@ -88,9 +88,9 @@ def test_splunk_logging_successful_rows(
@patch("update_ack_file.obtain_current_ack_content")
@patch("ack_processor.create_ack_data")
@patch("ack_processor.update_ack_file")
@patch("log_structure_splunk.send_log_to_firehose")
@patch("logging_decorators.send_log_to_firehose")
@patch("time.time")
@patch("log_structure_splunk.datetime")
@patch("logging_decorators.datetime")
def test_splunk_logging_missing_data(
self,
mock_datetime,
@@ -133,9 +133,9 @@ def test_splunk_logging_missing_data(
@patch("update_ack_file.obtain_current_ack_content")
@patch("ack_processor.create_ack_data")
@patch("ack_processor.update_ack_file")
@patch("log_structure_splunk.send_log_to_firehose")
@patch("logging_decorators.send_log_to_firehose")
@patch("time.time")
@patch("log_structure_splunk.datetime")
@patch("logging_decorators.datetime")
def test_splunk_logging_statuscode_diagnostics(
self,
mock_datetime,
@@ -194,9 +194,9 @@ def test_splunk_logging_statuscode_diagnostics(
@patch("update_ack_file.obtain_current_ack_content")
@patch("ack_processor.create_ack_data")
@patch("ack_processor.update_ack_file")
@patch("log_structure_splunk.send_log_to_firehose")
@patch("logging_decorators.send_log_to_firehose")
@patch("time.time")
@patch("log_structure_splunk.datetime")
@patch("logging_decorators.datetime")
def test_splunk_logging_multiple_rows(
self,
mock_datetime,
@@ -242,9 +242,9 @@ def test_splunk_logging_multiple_rows(
@patch("update_ack_file.obtain_current_ack_content")
@patch("ack_processor.create_ack_data")
@patch("ack_processor.update_ack_file")
@patch("log_structure_splunk.send_log_to_firehose")
@patch("logging_decorators.send_log_to_firehose")
@patch("time.time")
@patch("log_structure_splunk.datetime")
@patch("logging_decorators.datetime")
def test_splunk_logging_multiple_with_diagnostics(
self,
mock_datetime,
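Note: every test above patches time.time and logging_decorators.datetime so that the time_taken and date_time fields in the emitted records are deterministic and can be asserted exactly. A minimal sketch of that idea against the new helper, with illustrative values; the fixtures and assertions of the real test class are not part of this diff:

from unittest.mock import patch
from logging_decorators import generate_and_send_logs

with patch("logging_decorators.send_log_to_firehose") as mock_send, patch("time.time", return_value=1000.0):
    generate_and_send_logs(start_time=1000.0, base_log_data={}, additional_log_data={"statusCode": 200})

log_data = mock_send.call_args[0][0]
assert log_data["time_taken"] == "0.0s"  # 1000.0 - 1000.0, rounded to 5 decimal places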
