From 6515e305bb8a187a3078f51cb6a68934f6dac5f4 Mon Sep 17 00:00:00 2001 From: ronsh12 <101520407+ronsh12@users.noreply.github.com> Date: Mon, 4 Dec 2023 12:16:50 +0200 Subject: [PATCH] Added complex queries for bigquery pci_dss --- .../aws/compliance-free/dbt_project.yml | 2 +- ...sql => aws_compliance__pci_dss_v3_2_1.sql} | 0 ...lambda_function_prohibit_public_access.sql | 11 +++- .../aws/macros/s3/deny_http_requests.sql | 33 +++++++++- .../macros/s3/publicly_readable_buckets.sql | 63 ++++++++++++++++++- .../macros/s3/publicly_writable_buckets.sql | 63 ++++++++++++++++++- .../remove_unused_secrets_manager_secrets.sql | 15 ++++- ...ic_rotation_should_rotate_successfully.sql | 15 ++++- ...ated_within_a_specified_number_of_days.sql | 15 ++++- 9 files changed, 208 insertions(+), 9 deletions(-) rename transformations/aws/compliance-premium/models/{aws_compliance__pci_dss_v3.2.1.sql => aws_compliance__pci_dss_v3_2_1.sql} (100%) diff --git a/transformations/aws/compliance-free/dbt_project.yml b/transformations/aws/compliance-free/dbt_project.yml index b5bf7bc18..6d7f2cae1 100644 --- a/transformations/aws/compliance-free/dbt_project.yml +++ b/transformations/aws/compliance-free/dbt_project.yml @@ -6,7 +6,7 @@ version: "1.0.0" config-version: 2 # This setting configures which "profile" dbt uses for this project. -profile: "bigquery_aws" +profile: "aws_compliance" # These configurations specify where dbt should look for different types of files. 
# The `model-paths` config, for example, states that models in this project can be diff --git a/transformations/aws/compliance-premium/models/aws_compliance__pci_dss_v3.2.1.sql b/transformations/aws/compliance-premium/models/aws_compliance__pci_dss_v3_2_1.sql similarity index 100% rename from transformations/aws/compliance-premium/models/aws_compliance__pci_dss_v3.2.1.sql rename to transformations/aws/compliance-premium/models/aws_compliance__pci_dss_v3_2_1.sql diff --git a/transformations/aws/macros/lambda/lambda_function_prohibit_public_access.sql b/transformations/aws/macros/lambda/lambda_function_prohibit_public_access.sql index 30b92e7ad..7201c121d 100644 --- a/transformations/aws/macros/lambda/lambda_function_prohibit_public_access.sql +++ b/transformations/aws/macros/lambda/lambda_function_prohibit_public_access.sql @@ -39,6 +39,13 @@ select account_id, arn as resource_id, 'fail' as status -- TODO FIXME -from {{ full_table_name("aws_lambda_functions") }} -where 1 = 0 +from {{ full_table_name("aws_lambda_functions") }}, + UNNEST(JSON_QUERY_ARRAY(policy_document.Statement)) AS statement +where JSON_VALUE(statement.Effect) = 'Allow' + and ( + JSON_VALUE(statement.Principal) = '*' + or JSON_VALUE(statement.Principal.AWS) = '*' + + or ( '*' IN UNNEST(JSON_EXTRACT_STRING_ARRAY(statement.Principal.AWS)) ) + ) {% endmacro %} diff --git a/transformations/aws/macros/s3/deny_http_requests.sql b/transformations/aws/macros/s3/deny_http_requests.sql index 171010fa0..5a4b83dd5 100644 --- a/transformations/aws/macros/s3/deny_http_requests.sql +++ b/transformations/aws/macros/s3/deny_http_requests.sql @@ -76,4 +76,35 @@ where {% endmacro %} {% macro default__deny_http_requests(framework, check_id) %}{% endmacro %} - \ No newline at end of file + +{% macro bigquery__deny_http_requests(framework, check_id) %} +select + '{{framework}}' as framework, + '{{check_id}}' as check_id, + 'S3 buckets should deny non-HTTPS requests' as title, + account_id, + arn as resource_id, + 'fail' 
as status +from + {{ full_table_name("aws_s3_buckets") }} +where + arn not in ( + -- Find all buckets that have a bucket policy that denies non-SSL requests + select arn + from (select aws_s3_buckets.arn, + statements, + statements.Principal as principals + from {{ full_table_name("aws_s3_buckets") }}, + UNNEST(JSON_QUERY_ARRAY(policy.Statement)) as statements + where JSON_VALUE(statements.Effect) = 'Deny') as foo, + UNNEST(JSON_QUERY_ARRAY(statements, '$.Condition.Bool."aws:SecureTransport"')) as ssl + where ( JSON_VALUE(principals) = '*' + or ( + JSON_QUERY(principals, '$.AWS') IS NOT NULL + and ( + JSON_VALUE(principals.AWS) = '*' + or '*' IN UNNEST(JSON_EXTRACT_STRING_ARRAY(principals.AWS)) + ))) + and CAST( JSON_VALUE(ssl) AS BOOL) = FALSE + ) +{% endmacro %} \ No newline at end of file diff --git a/transformations/aws/macros/s3/publicly_readable_buckets.sql b/transformations/aws/macros/s3/publicly_readable_buckets.sql index d8b591b2e..2f6ffcb5e 100644 --- a/transformations/aws/macros/s3/publicly_readable_buckets.sql +++ b/transformations/aws/macros/s3/publicly_readable_buckets.sql @@ -133,4 +133,65 @@ where {% endmacro %} {% macro default__publicly_readable_buckets(framework, check_id) %}{% endmacro %} - \ No newline at end of file + +{% macro bigquery__publicly_readable_buckets(framework, check_id) %} +with policy_allow_public as ( + select + arn, + count(*) as statement_count + from + ( + select + aws_s3_buckets.arn, + statements.Principal as principals + from + {{ full_table_name("aws_s3_buckets") }}, + UNNEST(JSON_QUERY_ARRAY(policy.Statement)) as statements + where + JSON_VALUE(statements.Effect) = 'Allow' + ) as foo + where + JSON_VALUE(principals) = '*' + or ( + JSON_QUERY(principals, '$.AWS') IS NOT NULL + and ( + JSON_VALUE(principals.AWS) = '*' + or '*' IN UNNEST(JSON_EXTRACT_STRING_ARRAY(principals.AWS)) + ) + ) + group by + arn +) +select + '{{framework}}' as framework, + '{{check_id}}' as check_id, + 'S3 buckets 
should prohibit public read access' as title, + aws_s3_buckets.account_id, + aws_s3_buckets.arn as resource_id, + 'fail' as status +from + -- Find and join all bucket ACLS that give a public read access + {{ full_table_name("aws_s3_buckets") }} +left join + {{ full_table_name("aws_s3_bucket_grants") }} on + aws_s3_buckets.arn = aws_s3_bucket_grants.bucket_arn +-- Find all statements that could give public allow access +-- Statements that give public access have 1) Effect == Allow 2) One of the following principal: +-- Principal = {"AWS": "*"} +-- Principal = {"AWS": ["arn:aws:iam::12345678910:root", "*"]} +-- Principal = "*" +left join policy_allow_public on + aws_s3_buckets.arn = policy_allow_public.arn +where + ( + aws_s3_buckets.block_public_acls != TRUE + and ( + JSON_VALUE(grantee.URI) = 'http://acs.amazonaws.com/groups/global/AllUsers' + and permission in ('READ_ACP', 'FULL_CONTROL') + ) + ) + or ( + aws_s3_buckets.block_public_policy != TRUE + and policy_allow_public.statement_count > 0 + ) +{% endmacro %} \ No newline at end of file diff --git a/transformations/aws/macros/s3/publicly_writable_buckets.sql b/transformations/aws/macros/s3/publicly_writable_buckets.sql index ebc525cd0..f1c17415b 100644 --- a/transformations/aws/macros/s3/publicly_writable_buckets.sql +++ b/transformations/aws/macros/s3/publicly_writable_buckets.sql @@ -133,4 +133,65 @@ where {% endmacro %} {% macro default__publicly_writable_buckets(framework, check_id) %}{% endmacro %} - \ No newline at end of file + +{% macro bigquery__publicly_writable_buckets(framework, check_id) %} +with policy_allow_public as ( + select + arn, + count(*) as statement_count + from + ( + select + aws_s3_buckets.arn, + statements.Principal as principals + from + {{ full_table_name("aws_s3_buckets") }}, + UNNEST(JSON_QUERY_ARRAY(policy.Statement)) as statements + where + JSON_VALUE(statements.Effect) = 'Allow' + ) as foo + where + JSON_VALUE(principals) = '*' + or ( + JSON_QUERY(principals, '$.AWS') IS NOT NULL 
 + and ( + JSON_VALUE(principals.AWS) = '*' + or '*' IN UNNEST(JSON_EXTRACT_STRING_ARRAY(principals.AWS)) + ) + ) + group by + arn +) +select + '{{framework}}' as framework, + '{{check_id}}' as check_id, + 'S3 buckets should prohibit public write access' as title, + aws_s3_buckets.account_id, + aws_s3_buckets.arn as resource_id, + 'fail' as status +from + -- Find and join all bucket ACLS that give a public write access + {{ full_table_name("aws_s3_buckets") }} +left join + {{ full_table_name("aws_s3_bucket_grants") }} on + aws_s3_buckets.arn = aws_s3_bucket_grants.bucket_arn +-- Find all statements that could give public allow access +-- Statements that give public access have 1) Effect == Allow 2) One of the following principal: +-- Principal = {"AWS": "*"} +-- Principal = {"AWS": ["arn:aws:iam::12345678910:root", "*"]} +-- Principal = "*" +left join policy_allow_public on + aws_s3_buckets.arn = policy_allow_public.arn +where + ( + aws_s3_buckets.block_public_acls != TRUE + and ( + JSON_VALUE(grantee.URI) = 'http://acs.amazonaws.com/groups/global/AllUsers' + and permission in ('WRITE_ACP', 'FULL_CONTROL') + ) + ) + or ( + aws_s3_buckets.block_public_policy != TRUE + and policy_allow_public.statement_count > 0 + ) +{% endmacro %} \ No newline at end of file diff --git a/transformations/aws/macros/secretsmanager/remove_unused_secrets_manager_secrets.sql b/transformations/aws/macros/secretsmanager/remove_unused_secrets_manager_secrets.sql index 7617a140d..ca0ac8c43 100644 --- a/transformations/aws/macros/secretsmanager/remove_unused_secrets_manager_secrets.sql +++ b/transformations/aws/macros/secretsmanager/remove_unused_secrets_manager_secrets.sql @@ -31,4 +31,17 @@ from aws_secretsmanager_secrets {% endmacro %} {% macro default__remove_unused_secrets_manager_secrets(framework, check_id) %}{% endmacro %} - \ No newline at end of file + +{% macro bigquery__remove_unused_secrets_manager_secrets(framework, 
check_id) %} +select + '{{framework}}' as framework, + '{{check_id}}' as check_id, + 'Remove unused Secrets Manager secrets' as title, + account_id, + arn as resource_id, + case when + (last_accessed_date is null and created_date < TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 90 DAY)) + or (last_accessed_date is not null and last_accessed_date < TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 90 DAY)) + then 'fail' else 'pass' end as status +from {{ full_table_name("aws_secretsmanager_secrets") }} +{% endmacro %} \ No newline at end of file diff --git a/transformations/aws/macros/secretsmanager/secrets_configured_with_automatic_rotation_should_rotate_successfully.sql b/transformations/aws/macros/secretsmanager/secrets_configured_with_automatic_rotation_should_rotate_successfully.sql index 0c0f0b1a9..1324678d3 100644 --- a/transformations/aws/macros/secretsmanager/secrets_configured_with_automatic_rotation_should_rotate_successfully.sql +++ b/transformations/aws/macros/secretsmanager/secrets_configured_with_automatic_rotation_should_rotate_successfully.sql @@ -32,4 +32,17 @@ select {% endmacro %} {% macro default__secrets_configured_with_automatic_rotation_should_rotate_successfully(framework, check_id) %}{% endmacro %} - \ No newline at end of file + +{% macro bigquery__secrets_configured_with_automatic_rotation_should_rotate_successfully(framework, check_id) %} +select + '{{framework}}' as framework, + '{{check_id}}' as check_id, + 'Secrets Manager secrets configured with automatic rotation should rotate successfully' as title, + account_id, + arn as resource_id, + case when + (last_rotated_date is null and created_date < TIMESTAMP_SUB(CURRENT_TIMESTAMP(), (INTERVAL 1*(CAST(JSON_VALUE(rotation_rules.AutomaticallyAfterDays) AS INT64)) DAY) )) + or (last_rotated_date is not null and last_rotated_date < TIMESTAMP_SUB(CURRENT_TIMESTAMP(), (INTERVAL 1*(CAST(JSON_VALUE(rotation_rules.AutomaticallyAfterDays) AS INT64)) DAY) )) + then 'fail' else 'pass' end as status + from {{ 
full_table_name("aws_secretsmanager_secrets") }} +{% endmacro %} \ No newline at end of file diff --git a/transformations/aws/macros/secretsmanager/secrets_should_be_rotated_within_a_specified_number_of_days.sql b/transformations/aws/macros/secretsmanager/secrets_should_be_rotated_within_a_specified_number_of_days.sql index 946e14a50..d1fcee11a 100644 --- a/transformations/aws/macros/secretsmanager/secrets_should_be_rotated_within_a_specified_number_of_days.sql +++ b/transformations/aws/macros/secretsmanager/secrets_should_be_rotated_within_a_specified_number_of_days.sql @@ -31,4 +31,17 @@ from aws_secretsmanager_secrets {% endmacro %} {% macro default__secrets_should_be_rotated_within_a_specified_number_of_days(framework, check_id) %}{% endmacro %} - \ No newline at end of file + +{% macro bigquery__secrets_should_be_rotated_within_a_specified_number_of_days(framework, check_id) %} +select + '{{framework}}' as framework, + '{{check_id}}' as check_id, + 'Secrets Manager secrets should be rotated within a specified number of days' as title, + account_id, + arn as resource_id, + case when + (last_rotated_date is null and created_date < TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 90 DAY)) + or (last_rotated_date is not null and last_rotated_date < TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 90 DAY)) + then 'fail' else 'pass' end as status +from {{ full_table_name("aws_secretsmanager_secrets") }} +{% endmacro %} \ No newline at end of file