diff --git a/Core/LAMBDA/viz_functions/main.tf b/Core/LAMBDA/viz_functions/main.tf index 40a6104a..4d5259b7 100644 --- a/Core/LAMBDA/viz_functions/main.tf +++ b/Core/LAMBDA/viz_functions/main.tf @@ -215,82 +215,6 @@ locals { ######################################################################################################################################## ######################################################################################################################################## -############################### -## WRDS API Handler Function ## -############################### -data "archive_file" "wrds_api_handler_zip" { - type = "zip" - - source_file = "${path.module}/viz_wrds_api_handler/lambda_function.py" - - output_path = "${path.module}/temp/viz_wrds_api_handler_${var.environment}_${var.region}.zip" -} - -resource "aws_s3_object" "wrds_api_handler_zip_upload" { - bucket = var.deployment_bucket - key = "terraform_artifacts/${path.module}/viz_wrds_api_handler.zip" - source = data.archive_file.wrds_api_handler_zip.output_path - source_hash = filemd5(data.archive_file.wrds_api_handler_zip.output_path) -} - -resource "aws_lambda_function" "viz_wrds_api_handler" { - function_name = "hv-vpp-${var.environment}-viz-wrds-api-handler" - description = "Lambda function to ping WRDS API and format outputs for processing." 
- memory_size = 512 - timeout = 900 - vpc_config { - security_group_ids = [var.nat_sg_group] - subnet_ids = var.db_lambda_subnets - } - environment { - variables = { - DATASERVICES_HOST = var.dataservices_host - PYTHON_PREPROCESSING_BUCKET = var.python_preprocessing_bucket - PROCESSED_OUTPUT_PREFIX = "max_stage/ahps" - INITIALIZE_PIPELINE_FUNCTION = aws_lambda_function.viz_initialize_pipeline.arn - } - } - s3_bucket = aws_s3_object.wrds_api_handler_zip_upload.bucket - s3_key = aws_s3_object.wrds_api_handler_zip_upload.key - source_code_hash = filebase64sha256(data.archive_file.wrds_api_handler_zip.output_path) - runtime = "python3.9" - handler = "lambda_function.lambda_handler" - role = var.lambda_role - layers = [ - var.arcgis_python_api_layer, - var.es_logging_layer, - var.viz_lambda_shared_funcs_layer - ] - tags = { - "Name" = "hv-vpp-${var.environment}-viz-wrds-api-handler" - } -} - -resource "aws_cloudwatch_event_target" "check_lambda_every_five_minutes" { - rule = var.five_minute_trigger.name - target_id = aws_lambda_function.viz_initialize_pipeline.function_name - arn = aws_lambda_function.viz_initialize_pipeline.arn - input = "{\"configuration\":\"rfc\"}" -} - -resource "aws_lambda_permission" "allow_cloudwatch_to_call_check_lambda" { - statement_id = "AllowExecutionFromCloudWatch" - action = "lambda:InvokeFunction" - function_name = aws_lambda_function.viz_wrds_api_handler.function_name - principal = "events.amazonaws.com" - source_arn = var.five_minute_trigger.arn -} - -resource "aws_lambda_function_event_invoke_config" "viz_wrds_api_handler" { - function_name = resource.aws_lambda_function.viz_wrds_api_handler.function_name - maximum_retry_attempts = 0 - destination_config { - on_failure { - destination = var.email_sns_topics["viz_lambda_errors"].arn - } - } -} - ################################## ## EGIS Health Checker Function ## ################################## @@ -560,22 +484,8 @@ resource "aws_lambda_function" "viz_initialize_pipeline" { } } -# 
resource "aws_sns_topic_subscription" "viz_initialize_pipeline_subscriptions" { -# for_each = local.initialize_pipeline_subscriptions -# topic_arn = var.sns_topics["${each.value}"].arn -# protocol = "lambda" -# endpoint = resource.aws_lambda_function.viz_initialize_pipeline.arn -# } - -# resource "aws_lambda_permission" "viz_initialize_pipeline_permissions" { -# for_each = local.initialize_pipeline_subscriptions -# action = "lambda:InvokeFunction" -# function_name = resource.aws_lambda_function.viz_initialize_pipeline.function_name -# principal = "sns.amazonaws.com" -# source_arn = var.sns_topics["${each.value}"].arn -# } - resource "aws_sns_topic_subscription" "viz_initialize_pipeline_subscription_shared_nwm" { + count = var.environment == "ti" ? 0 : 1 provider = aws.sns topic_arn = var.nws_shared_account_nwm_sns protocol = "lambda" @@ -583,6 +493,7 @@ resource "aws_sns_topic_subscription" "viz_initialize_pipeline_subscription_shar } resource "aws_lambda_permission" "viz_initialize_pipeline_permissions_shared_nwm" { + count = var.environment == "ti" ? 
0 : 1 action = "lambda:InvokeFunction" function_name = resource.aws_lambda_function.viz_initialize_pipeline.function_name principal = "sns.amazonaws.com" @@ -999,10 +910,6 @@ output "publish_service" { value = aws_lambda_function.viz_publish_service } -output "wrds_api_handler" { - value = aws_lambda_function.viz_wrds_api_handler -} - output "egis_health_checker" { value = aws_lambda_function.egis_health_checker } diff --git a/Core/LAMBDA/viz_functions/viz_initialize_pipeline/lambda_function.py b/Core/LAMBDA/viz_functions/viz_initialize_pipeline/lambda_function.py index 7059300e..bd644c25 100644 --- a/Core/LAMBDA/viz_functions/viz_initialize_pipeline/lambda_function.py +++ b/Core/LAMBDA/viz_functions/viz_initialize_pipeline/lambda_function.py @@ -168,7 +168,7 @@ def __init__(self, start_event, print_init=True): self.configuration = configuration(config, reference_time=self.reference_time, input_bucket=bucket) elif "Records" in self.start_event: # Records in the start_event denotes a SNS trigger of the lambda function. self.invocation_type = "sns" - elif "invocation_type" in self.start_event: # Currently the max_flows and wrds_api_handler lambda functions manually invoke this lambda function and specify a "invocation_type" key in the payload. This is how we identify that. + elif "invocation_type" in self.start_event: # The max_flows lambda function manually invokes this lambda function and includes the "invocation_type" key in the payload. self.invocation_type = "lambda" #TODO: Clean this up to actually pull the value from the payload else: self.invocation_type = "manual" diff --git a/Core/StepFunctions/main.tf b/Core/StepFunctions/main.tf index 7f108bb5..c1f36697 100644 --- a/Core/StepFunctions/main.tf +++ b/Core/StepFunctions/main.tf @@ -109,6 +109,7 @@ resource "aws_sfn_state_machine" "replace_route_step_function" { } resource "aws_cloudwatch_event_target" "check_lambda_every_five_minutes" { + count = var.environment == "ti" ? 
0 : 1
   rule      = var.fifteen_minute_trigger.name
   target_id = aws_sfn_state_machine.replace_route_step_function.name
   arn       = aws_sfn_state_machine.replace_route_step_function.arn
diff --git a/Core/Testing/main.tf b/Core/Testing/main.tf
new file mode 100644
index 00000000..7d2d0570
--- /dev/null
+++ b/Core/Testing/main.tf
@@ -0,0 +1,70 @@
+variable "environment" {
+  type = string
+}
+
+variable "test_data_bucket" {
+  type = string
+}
+
+variable "step_function_arn" {
+  type = string
+}
+
+resource "aws_cloudwatch_event_rule" "detect_test_files" {
+  name        = "hv-vpp-${var.environment}-detect-test-files"
+  description = "Detects when a new test file has been created"
+  # NOTE(review): the heredoc below was garbled in the source; reconstructed as
+  # an S3 "Object Created" pattern scoped to the test bucket -- confirm against
+  # the intended rule before applying.
+  event_pattern = <<-EOF
+  {
+    "source": ["aws.s3"],
+    "detail-type": ["Object Created"],
+    "detail": {
+      "bucket": {"name": ["${var.test_data_bucket}"]},
+      "object": {"key": [{"prefix": "common/data/model/com/nwm/prod/"}]}
+    }
+  }
+  EOF
+}
+
+# Kick off tests in TI
+data "aws_s3_objects" "test_nwm_outputs" {
+  bucket   = var.test_data_bucket
+  prefix   = "test_nwm_outputs/"
+  max_keys = 2000
+  # NOTE(review): removed `depends_on = [var.step_function_arn]` -- depends_on
+  # must be a static reference to a resource or module, never a variable, so it
+  # fails `terraform plan`; ordering is already implied by the module input.
+}
+
+resource "aws_s3_object_copy" "test" {
+  count  = length(data.aws_s3_objects.test_nwm_outputs.keys)
+  bucket = var.test_data_bucket
+  source = join("/", [var.test_data_bucket, element(data.aws_s3_objects.test_nwm_outputs.keys, count.index)])
+  key    = replace(element(data.aws_s3_objects.test_nwm_outputs.keys, count.index), "test_nwm_outputs", formatdate("'common/data/model/com/nwm/prod/nwm.'YYYYMMDD", timestamp()))
+}
diff --git a/Core/main.tf b/Core/main.tf
index fbc07723..a1519a93 100644
--- a/Core/main.tf
+++ b/Core/main.tf
@@ -707,16 +707,11 @@ module "sync-wrds-location-db" {
   db_dumps_bucket = module.s3.buckets["deployment"].bucket
 }
 
-# Kick off tests in TI
-data "aws_s3_objects" "test_nwm_outputs" {
-  bucket = module.s3.buckets["deployment"].bucket
-  prefix = "test_nwm_outputs/"
-  depends_on = []
-}
-
-resource "aws_s3_object_copy" "test" {
-  count = length(data.aws_s3_objects.test_nwm_outputs.keys)
-  bucket = module.s3.buckets["deployment"].bucket
-  key = replace(element(data.aws_s3_objects.test_nwm_outputs.keys, count.index),
"test_nwm_outputs", formatdate("YYYYDDMM", timestamp())) - source = element(data.aws_s3_objects.test_nwm_outputs.keys, count.index) +module "testing" { + count = local.env.environment == "ti" ? 1 : 0 + source = "./Testing" + + environment = local.env.environment + test_data_bucket = module.s3.buckets["deployment"].bucket + step_function_arn = module.step-functions.viz_pipeline_step_function.arn }