Implements apocalyptic testing framework
Refs #938
shawncrawley committed Nov 12, 2024
1 parent d698160 commit 0e254a2
Showing 5 changed files with 81 additions and 108 deletions.
97 changes: 2 additions & 95 deletions Core/LAMBDA/viz_functions/main.tf
@@ -215,82 +215,6 @@ locals {
########################################################################################################################################
########################################################################################################################################

###############################
## WRDS API Handler Function ##
###############################
data "archive_file" "wrds_api_handler_zip" {
type = "zip"

source_file = "${path.module}/viz_wrds_api_handler/lambda_function.py"

output_path = "${path.module}/temp/viz_wrds_api_handler_${var.environment}_${var.region}.zip"
}

resource "aws_s3_object" "wrds_api_handler_zip_upload" {
bucket = var.deployment_bucket
key = "terraform_artifacts/${path.module}/viz_wrds_api_handler.zip"
source = data.archive_file.wrds_api_handler_zip.output_path
source_hash = filemd5(data.archive_file.wrds_api_handler_zip.output_path)
}

resource "aws_lambda_function" "viz_wrds_api_handler" {
function_name = "hv-vpp-${var.environment}-viz-wrds-api-handler"
description = "Lambda function to ping WRDS API and format outputs for processing."
memory_size = 512
timeout = 900
vpc_config {
security_group_ids = [var.nat_sg_group]
subnet_ids = var.db_lambda_subnets
}
environment {
variables = {
DATASERVICES_HOST = var.dataservices_host
PYTHON_PREPROCESSING_BUCKET = var.python_preprocessing_bucket
PROCESSED_OUTPUT_PREFIX = "max_stage/ahps"
INITIALIZE_PIPELINE_FUNCTION = aws_lambda_function.viz_initialize_pipeline.arn
}
}
s3_bucket = aws_s3_object.wrds_api_handler_zip_upload.bucket
s3_key = aws_s3_object.wrds_api_handler_zip_upload.key
source_code_hash = filebase64sha256(data.archive_file.wrds_api_handler_zip.output_path)
runtime = "python3.9"
handler = "lambda_function.lambda_handler"
role = var.lambda_role
layers = [
var.arcgis_python_api_layer,
var.es_logging_layer,
var.viz_lambda_shared_funcs_layer
]
tags = {
"Name" = "hv-vpp-${var.environment}-viz-wrds-api-handler"
}
}

resource "aws_cloudwatch_event_target" "check_lambda_every_five_minutes" {
rule = var.five_minute_trigger.name
target_id = aws_lambda_function.viz_initialize_pipeline.function_name
arn = aws_lambda_function.viz_initialize_pipeline.arn
input = "{\"configuration\":\"rfc\"}"
}

resource "aws_lambda_permission" "allow_cloudwatch_to_call_check_lambda" {
statement_id = "AllowExecutionFromCloudWatch"
action = "lambda:InvokeFunction"
function_name = aws_lambda_function.viz_wrds_api_handler.function_name
principal = "events.amazonaws.com"
source_arn = var.five_minute_trigger.arn
}

resource "aws_lambda_function_event_invoke_config" "viz_wrds_api_handler" {
function_name = resource.aws_lambda_function.viz_wrds_api_handler.function_name
maximum_retry_attempts = 0
destination_config {
on_failure {
destination = var.email_sns_topics["viz_lambda_errors"].arn
}
}
}

##################################
## EGIS Health Checker Function ##
##################################
@@ -560,29 +484,16 @@ resource "aws_lambda_function" "viz_initialize_pipeline" {
  }
}

# resource "aws_sns_topic_subscription" "viz_initialize_pipeline_subscriptions" {
# for_each = local.initialize_pipeline_subscriptions
# topic_arn = var.sns_topics["${each.value}"].arn
# protocol = "lambda"
# endpoint = resource.aws_lambda_function.viz_initialize_pipeline.arn
# }

# resource "aws_lambda_permission" "viz_initialize_pipeline_permissions" {
# for_each = local.initialize_pipeline_subscriptions
# action = "lambda:InvokeFunction"
# function_name = resource.aws_lambda_function.viz_initialize_pipeline.function_name
# principal = "sns.amazonaws.com"
# source_arn = var.sns_topics["${each.value}"].arn
# }

resource "aws_sns_topic_subscription" "viz_initialize_pipeline_subscription_shared_nwm" {
count = var.environment == "ti" ? 0 : 1
provider = aws.sns
topic_arn = var.nws_shared_account_nwm_sns
protocol = "lambda"
endpoint = resource.aws_lambda_function.viz_initialize_pipeline.arn
}

resource "aws_lambda_permission" "viz_initialize_pipeline_permissions_shared_nwm" {
count = var.environment == "ti" ? 0 : 1
action = "lambda:InvokeFunction"
function_name = resource.aws_lambda_function.viz_initialize_pipeline.function_name
principal = "sns.amazonaws.com"
@@ -999,10 +910,6 @@ output "publish_service" {
  value = aws_lambda_function.viz_publish_service
}

output "wrds_api_handler" {
  value = aws_lambda_function.viz_wrds_api_handler
}

output "egis_health_checker" {
  value = aws_lambda_function.egis_health_checker
}
@@ -168,7 +168,7 @@ def __init__(self, start_event, print_init=True):
            self.configuration = configuration(config, reference_time=self.reference_time, input_bucket=bucket)
        elif "Records" in self.start_event:  # Records in the start_event denotes an SNS trigger of the lambda function.
            self.invocation_type = "sns"
        elif "invocation_type" in self.start_event:  # Currently the max_flows and wrds_api_handler lambda functions manually invoke this lambda function and specify an "invocation_type" key in the payload. This is how we identify that.
        elif "invocation_type" in self.start_event:  # The max_flows lambda function manually invokes this lambda function and includes the "invocation_type" key in the payload.
            self.invocation_type = "lambda"  # TODO: Clean this up to actually pull the value from the payload
        else:
            self.invocation_type = "manual"
1 change: 1 addition & 0 deletions Core/StepFunctions/main.tf
@@ -109,6 +109,7 @@ resource "aws_sfn_state_machine" "replace_route_step_function" {
}

resource "aws_cloudwatch_event_target" "check_lambda_every_five_minutes" {
count = var.environment == "ti" ? 0 : 1
rule = var.fifteen_minute_trigger.name
target_id = aws_sfn_state_machine.replace_route_step_function.name
arn = aws_sfn_state_machine.replace_route_step_function.arn
70 changes: 70 additions & 0 deletions Core/Testing/main.tf
@@ -0,0 +1,70 @@
variable "environment" {
type = "string"
}

variable "test_data_bucket" {
type = "string"
}

variable "step_function_arn" {
type = "string"
}

resource "aws_cloudwatch_event_rule" "detect_test_files" {
name = "hv-vpp-${var.environment}-detect-test-files"
description = "Detects when a new test file has been created"
event_pattern = <<EOF
{
"source": ["aws.s3"],
"detail-type": ["Object Created"],
"detail": {
"bucket": {
"name": ["${var.test_data_bucket}"]
},
"object": {
"key": [{
"prefix": "common/data/model/com/nwm/prod/nwm."
}]
}
}
}
EOF
}

resource "aws_cloudwatch_event_target" "trigger_pipeline_test_run" {
rule = aws_cloudwatch_event_rule.detect_test_files.name
target_id = aws_lambda_function.viz_initialize_pipeline.function_name
arn = aws_lambda_function.viz_initialize_pipeline.arn
input_transformer {
input_paths = {
"s3_bucket": "$.detail.bucket.name",
"s3_key": "$.detail.object.key"
}
input_template = <<EOF
{
"Records": [
{
"Sns": {
"Message": "{\"Records\": [{\"s3\": {\"bucket\": {\"name\": \"<s3_bucket>\"}, \"object\": {\"key\": \"<s3_key>\"}}}]}"
}
}
]
}
EOF
}
}
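For reference, a rough sketch (bucket and key values below are placeholders, not real test data) of the payload viz_initialize_pipeline receives after the input transformer runs. Because the top-level "Records" key is present, the handler's __init__ (see the viz_classes change above) classifies it as an SNS-style invocation, so test files flow through the same code path as real NWM SNS notifications.

# Rough sketch of the post-transformation event; bucket/key are placeholders.
import json

transformed_event = {
    "Records": [
        {
            "Sns": {
                "Message": json.dumps({
                    "Records": [{
                        "s3": {
                            "bucket": {"name": "example-test-data-bucket"},
                            "object": {"key": "common/data/model/com/nwm/prod/nwm.20241112/example_file.nc"},
                        }
                    }]
                })
            }
        }
    ]
}

# The pipeline can unwrap the inner S3 record exactly as it would for a real
# NWM SNS notification:
inner = json.loads(transformed_event["Records"][0]["Sns"]["Message"])
print(inner["Records"][0]["s3"]["object"]["key"])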

# Kick off tests in TI
data "aws_s3_objects" "test_nwm_outputs" {
bucket = var.test_data_bucket
prefix = "test_nwm_outputs/"
max_keys = 2000
depends_on = [var.step_function_arn]
}

resource "aws_s3_object_copy" "test" {
count = length(data.aws_s3_objects.test_nwm_outputs.keys)
bucket = var.test_data_bucket
source = join("/", [var.test_data_bucket, element(data.aws_s3_objects.test_nwm_outputs.keys, count.index)])
key = replace(element(data.aws_s3_objects.test_nwm_outputs.keys, count.index), "test_nwm_outputs", formatdate("'common/data/model/com/nwm/prod/nwm.'YYYYDDMM", timestamp()))
}
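As a worked example (the source key below is hypothetical, and the date assumes an apply on the commit date, 2024-11-12), this is roughly what the replace()/formatdate() expression above does to each copied object key:

# Worked example of the key rewrite above; source key is hypothetical.
from datetime import date

source_key = "test_nwm_outputs/nwm.t00z.short_range.channel_rt.f001.conus.nc"
run_date = date(2024, 11, 12)

# Terraform's format string 'common/data/model/com/nwm/prod/nwm.'YYYYDDMM
# expands to the quoted literal prefix followed by year, day, month.
new_prefix = "common/data/model/com/nwm/prod/nwm." + run_date.strftime("%Y%d%m")
new_key = source_key.replace("test_nwm_outputs", new_prefix)

print(new_key)
# common/data/model/com/nwm/prod/nwm.20241211/nwm.t00z.short_range.channel_rt.f001.conus.nc

Since the detect_test_files rule matches only on the literal prefix common/data/model/com/nwm/prod/nwm., the copied objects trigger the pipeline regardless of how the date portion of the key is ordered.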
19 changes: 7 additions & 12 deletions Core/main.tf
@@ -707,16 +707,11 @@ module "sync-wrds-location-db" {
  db_dumps_bucket = module.s3.buckets["deployment"].bucket
}

# Kick off tests in TI
data "aws_s3_objects" "test_nwm_outputs" {
  bucket     = module.s3.buckets["deployment"].bucket
  prefix     = "test_nwm_outputs/"
  depends_on = []
}

resource "aws_s3_object_copy" "test" {
  count  = length(data.aws_s3_objects.test_nwm_outputs.keys)
  bucket = module.s3.buckets["deployment"].bucket
  key    = replace(element(data.aws_s3_objects.test_nwm_outputs.keys, count.index), "test_nwm_outputs", formatdate("YYYYDDMM", timestamp()))
  source = element(data.aws_s3_objects.test_nwm_outputs.keys, count.index)
module "testing" {
  count  = local.env.environment == "ti" ? 1 : 0
  source = "./Testing"

  environment       = local.env.environment
  test_data_bucket  = module.s3.buckets["deployment"].bucket
  step_function_arn = module.step-functions.viz_pipeline_step_function.arn
}
