Skip to content

Commit

Permalink
Added complex queries for bigquery pci_dss
Browse files Browse the repository at this point in the history
  • Loading branch information
ronsh12 committed Dec 4, 2023
1 parent 9eb5ba4 commit f536a1b
Show file tree
Hide file tree
Showing 9 changed files with 208 additions and 9 deletions.
2 changes: 1 addition & 1 deletion transformations/aws/compliance-premium/dbt_project.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ version: '1.0.0'
config-version: 2

# This setting configures which "profile" dbt uses for this project.
profile: 'aws_compliance'
profile: 'bigquery_aws'

# These configurations specify where dbt should look for different types of files.
# The `model-paths` config, for example, states that models in this project can be
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,13 @@ select
account_id,
arn as resource_id,
'fail' as status -- TODO FIXME
from {{ full_table_name("aws_lambda_functions") }}
where 1 = 0
from {{ full_table_name("aws_lambda_functions") }},
UNNEST(JSON_QUERY_ARRAY(policy_document.Statement)) AS statement
where JSON_VALUE(statement.Effect) = 'Allow'
and (
JSON_VALUE(statement.Principal) = '*'
or JSON_VALUE(statement.Principal.AWS) = '*'

or ( '*' IN UNNEST(JSON_EXTRACT_STRING_ARRAY(statement.Principal.AWS)) )
)
{% endmacro %}
33 changes: 32 additions & 1 deletion transformations/aws/macros/s3/deny_http_requests.sql
Original file line number Diff line number Diff line change
Expand Up @@ -76,4 +76,35 @@ where
{% endmacro %}

{% macro default__deny_http_requests(framework, check_id) %}{% endmacro %}


{% macro bigquery__deny_http_requests(framework, check_id) %}
-- Fails every bucket whose policy does NOT contain a Deny statement for
-- non-SSL requests (Effect = Deny, public Principal, aws:SecureTransport = false).
select
    '{{framework}}' as framework,
    '{{check_id}}' as check_id,
    'S3 buckets should deny non-HTTPS requests' as title,
    account_id,
    arn as resource_id,
    'fail' as status
from
    {{ full_table_name("aws_s3_buckets") }}
where
    arn not in (
        -- Find all buckets that have a bucket policy that denies non-SSL requests
        select arn
        from (select aws_s3_buckets.arn,
                     statements,
                     statements.Principal as principals
              from {{ full_table_name("aws_s3_buckets") }},
                   UNNEST(JSON_QUERY_ARRAY(policy.Statement)) as statements
              -- JSON_VALUE strips the outer quotes from JSON string scalars,
              -- so compare against the bare value, not '"Deny"'
              where JSON_VALUE(statements.Effect) = 'Deny') as foo,
             -- NOTE(review): JSON paths are case-sensitive and IAM policies
             -- normally spell this key "aws:SecureTransport"; also the Bool
             -- value is usually a scalar, not an array — confirm against the
             -- stored policy shape.
             UNNEST(JSON_QUERY_ARRAY(statements, '$.Condition.Bool."aws:securetransport"')) as ssl
        -- Statement applies to everyone when Principal = "*", {"AWS": "*"},
        -- or "*" appears inside an AWS principal list (same pattern as the
        -- lambda public-access macro). Parenthesized so the SSL condition
        -- below applies to every principal branch.
        where (
            JSON_VALUE(principals) = '*'
            or JSON_VALUE(principals.AWS) = '*'
            or '*' IN UNNEST(JSON_EXTRACT_STRING_ARRAY(principals.AWS))
        )
        and CAST(JSON_VALUE(ssl) AS BOOL) = FALSE
    )
{% endmacro %}
63 changes: 62 additions & 1 deletion transformations/aws/macros/s3/publicly_readable_buckets.sql
Original file line number Diff line number Diff line change
Expand Up @@ -133,4 +133,65 @@ where
{% endmacro %}

{% macro default__publicly_readable_buckets(framework, check_id) %}{% endmacro %}


{% macro bigquery__publicly_readable_buckets(framework, check_id) %}
-- Fails buckets that are publicly readable either through an ACL grant to
-- AllUsers or through a bucket policy with a public Allow statement, unless
-- the corresponding public-access block setting is enabled.
with policy_allow_public as (
    -- Buckets with at least one Allow statement whose Principal grants public access:
    -- Principal = "*", Principal = {"AWS": "*"}, or "*" inside an AWS principal list
    select
        arn,
        count(*) as statement_count
    from
        (
            select
                aws_s3_buckets.arn,
                statements.Principal as principals
            from
                {{ full_table_name("aws_s3_buckets") }},
                UNNEST(JSON_QUERY_ARRAY(policy.Statement)) as statements
            where
                -- JSON_VALUE strips the outer quotes from JSON string scalars,
                -- so compare against the bare value, not '"Allow"'
                JSON_VALUE(statements.Effect) = 'Allow'
        ) as foo
    where
        JSON_VALUE(principals) = '*'
        or JSON_VALUE(principals.AWS) = '*'
        or '*' IN UNNEST(JSON_EXTRACT_STRING_ARRAY(principals.AWS))
    group by
        arn
)
select
    '{{framework}}' as framework,
    '{{check_id}}' as check_id,
    'S3 buckets should prohibit public read access' as title,
    aws_s3_buckets.account_id,
    aws_s3_buckets.arn as resource_id,
    -- Only violating buckets are returned, so status is constant;
    -- passing buckets are simply omitted from the result set.
    'fail' as status
from
    -- Find and join all bucket ACLs that give public read access
    {{ full_table_name("aws_s3_buckets") }}
left join
    {{ full_table_name("aws_s3_bucket_grants") }} on
        aws_s3_buckets.arn = aws_s3_bucket_grants.bucket_arn
left join policy_allow_public on
    aws_s3_buckets.arn = policy_allow_public.arn
where
    (
        -- ACL path: a global AllUsers grant with read permissions,
        -- unless public ACLs are blocked on the bucket.
        -- NOTE(review): plain 'READ' is not listed — confirm whether the
        -- framework intends to flag it as well.
        aws_s3_buckets.block_public_acls != TRUE
        and (
            JSON_VALUE(grantee.URI) = 'http://acs.amazonaws.com/groups/global/AllUsers'
            and permission in ('READ_ACP', 'FULL_CONTROL')
        )
    )
    or (
        -- Policy path: a public Allow statement, unless public policies are blocked
        aws_s3_buckets.block_public_policy != TRUE
        and policy_allow_public.statement_count > 0
    )
{% endmacro %}
63 changes: 62 additions & 1 deletion transformations/aws/macros/s3/publicly_writable_buckets.sql
Original file line number Diff line number Diff line change
Expand Up @@ -133,4 +133,65 @@ where
{% endmacro %}

{% macro default__publicly_writable_buckets(framework, check_id) %}{% endmacro %}


{% macro bigquery__publicly_writable_buckets(framework, check_id) %}
-- Fails buckets that are publicly writable either through an ACL grant to
-- AllUsers or through a bucket policy with a public Allow statement, unless
-- the corresponding public-access block setting is enabled.
with policy_allow_public as (
    -- Buckets with at least one Allow statement whose Principal grants public access:
    -- Principal = "*", Principal = {"AWS": "*"}, or "*" inside an AWS principal list
    select
        arn,
        count(*) as statement_count
    from
        (
            select
                aws_s3_buckets.arn,
                statements.Principal as principals
            from
                {{ full_table_name("aws_s3_buckets") }},
                UNNEST(JSON_QUERY_ARRAY(policy.Statement)) as statements
            where
                -- JSON_VALUE strips the outer quotes from JSON string scalars,
                -- so compare against the bare value, not '"Allow"'
                JSON_VALUE(statements.Effect) = 'Allow'
        ) as foo
    where
        JSON_VALUE(principals) = '*'
        or JSON_VALUE(principals.AWS) = '*'
        or '*' IN UNNEST(JSON_EXTRACT_STRING_ARRAY(principals.AWS))
    group by
        arn
)
select
    '{{framework}}' as framework,
    '{{check_id}}' as check_id,
    'S3 buckets should prohibit public write access' as title,
    aws_s3_buckets.account_id,
    aws_s3_buckets.arn as resource_id,
    -- Only violating buckets are returned, so status is constant;
    -- passing buckets are simply omitted from the result set.
    'fail' as status
from
    -- Find and join all bucket ACLs that give public write access
    {{ full_table_name("aws_s3_buckets") }}
left join
    {{ full_table_name("aws_s3_bucket_grants") }} on
        aws_s3_buckets.arn = aws_s3_bucket_grants.bucket_arn
left join policy_allow_public on
    aws_s3_buckets.arn = policy_allow_public.arn
where
    (
        -- ACL path: a global AllUsers grant with write permissions,
        -- unless public ACLs are blocked on the bucket.
        -- NOTE(review): plain 'WRITE' is not listed — confirm whether the
        -- framework intends to flag it as well.
        aws_s3_buckets.block_public_acls != TRUE
        and (
            JSON_VALUE(grantee.URI) = 'http://acs.amazonaws.com/groups/global/AllUsers'
            and permission in ('WRITE_ACP', 'FULL_CONTROL')
        )
    )
    or (
        -- Policy path: a public Allow statement, unless public policies are blocked
        aws_s3_buckets.block_public_policy != TRUE
        and policy_allow_public.statement_count > 0
    )
{% endmacro %}
Original file line number Diff line number Diff line change
Expand Up @@ -31,4 +31,17 @@ from aws_secretsmanager_secrets
{% endmacro %}

{% macro default__remove_unused_secrets_manager_secrets(framework, check_id) %}{% endmacro %}


{% macro bigquery__remove_unused_secrets_manager_secrets(framework, check_id) %}
-- Flags secrets that have not been accessed in the last 90 days
-- (or, if never accessed, were created more than 90 days ago).
-- COALESCE folds the two original null/not-null branches into one predicate:
-- use last_accessed_date when present, otherwise fall back to created_date.
select
    '{{framework}}' as framework,
    '{{check_id}}' as check_id,
    'Remove unused Secrets Manager secrets' as title,
    account_id,
    arn as resource_id,
    case
        when COALESCE(last_accessed_date, created_date)
             < TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 90 DAY)
            then 'fail'
        else 'pass'
    end as status
from {{ full_table_name("aws_secretsmanager_secrets") }}
{% endmacro %}
Original file line number Diff line number Diff line change
Expand Up @@ -32,4 +32,17 @@ select
{% endmacro %}

{% macro default__secrets_configured_with_automatic_rotation_should_rotate_successfully(framework, check_id) %}{% endmacro %}


{% macro bigquery__secrets_configured_with_automatic_rotation_should_rotate_successfully(framework, check_id) %}
-- Fails secrets whose last rotation (or creation date, if never rotated) is
-- older than the configured rotation window (rotation_rules.AutomaticallyAfterDays).
-- If the rotation window is absent, the comparison is NULL and the secret passes,
-- matching the original branching logic.
select
    '{{framework}}' as framework,
    '{{check_id}}' as check_id,
    'Secrets Manager secrets configured with automatic rotation should rotate successfully' as title,
    account_id,
    arn as resource_id,
    case
        when COALESCE(last_rotated_date, created_date)
             < TIMESTAMP_SUB(CURRENT_TIMESTAMP(),
                             INTERVAL (CAST(JSON_VALUE(rotation_rules.AutomaticallyAfterDays) AS INT64)) DAY)
            then 'fail'
        else 'pass'
    end as status
from {{ full_table_name("aws_secretsmanager_secrets") }}
{% endmacro %}
Original file line number Diff line number Diff line change
Expand Up @@ -31,4 +31,17 @@ from aws_secretsmanager_secrets
{% endmacro %}

{% macro default__secrets_should_be_rotated_within_a_specified_number_of_days(framework, check_id) %}{% endmacro %}


{% macro bigquery__secrets_should_be_rotated_within_a_specified_number_of_days(framework, check_id) %}
-- Flags secrets not rotated within the last 90 days
-- (or, if never rotated, created more than 90 days ago).
-- COALESCE folds the two original null/not-null branches into one predicate:
-- use last_rotated_date when present, otherwise fall back to created_date.
select
    '{{framework}}' as framework,
    '{{check_id}}' as check_id,
    'Secrets Manager secrets should be rotated within a specified number of days' as title,
    account_id,
    arn as resource_id,
    case
        when COALESCE(last_rotated_date, created_date)
             < TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 90 DAY)
            then 'fail'
        else 'pass'
    end as status
from {{ full_table_name("aws_secretsmanager_secrets") }}
{% endmacro %}

0 comments on commit f536a1b

Please sign in to comment.