diff --git a/.github/workflows/docs-pr.yml b/.github/workflows/docs-pr.yml index 35b58df243f..e1f6ac1f69b 100644 --- a/.github/workflows/docs-pr.yml +++ b/.github/workflows/docs-pr.yml @@ -5,7 +5,6 @@ concurrency: on: pull_request_target: types: [opened, synchronize, reopened, closed] - env: GHP_BASE_URL: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }} @@ -22,6 +21,8 @@ jobs: intersphinx-links: | amazon_aws:https://ansible-collections.github.io/amazon.aws/branch/main/ ansible_devel:https://docs.ansible.com/ansible-core/devel/ + artifact-name: ${{ github.event.repository.name }}_validate_docs_${{ github.event.pull_request.head.sha }} + build-docs: permissions: diff --git a/CHANGELOG.rst b/CHANGELOG.rst index d2ed998bf35..b47d79fe4a2 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -4,6 +4,29 @@ community.aws Release Notes .. contents:: Topics +v7.2.0 +====== + +Release Summary +--------------- + +This release includes a new module ``dynamodb_table_info``, new features for the ``glue_job`` and ``msk_cluster`` modules, and a bugfix for the ``aws_ssm`` connection plugin. + +Minor Changes +------------- + +- glue_job - add support for 2 new instance types which are G.4X and G.8X (https://github.com/ansible-collections/community.aws/pull/2048). +- msk_cluster - Support for additional ``m5`` and ``m7g`` types of MSK clusters (https://github.com/ansible-collections/community.aws/pull/1947). + +Bugfixes +-------- + +- ssm(connection) - fix bucket region logic when region is ``us-east-1`` (https://github.com/ansible-collections/community.aws/pull/1908). + +New Modules +----------- + +- dynamodb_table_info - Returns information about a Dynamo DB table v7.1.0 ====== @@ -134,7 +157,6 @@ This release brings some new plugins and features. Several bugfixes, breaking ch The community.aws collection has dropped support for ``botocore<1.25.0`` and ``boto3<1.22.0``. Support for Python 3.6 has also been dropped. - Minor Changes ------------- @@ -207,7 +229,6 @@ Release Summary This release contains a number of bugfixes for various modules, as well as new features for the ``ec2_launch_template`` and ``msk_cluster`` modules. This is the last planned minor release prior to the release of version 6.0.0. - Minor Changes ------------- @@ -303,7 +324,6 @@ modules. As well as improvements to the ``ecs_cluster``, ``ec2_ecr``, ``ecs_service``, ``iam_role`` and ``ssm_parameter`` plugins. - Minor Changes ------------- @@ -369,7 +389,6 @@ Support for ``ansible-core<2.11`` has also been dropped. This release also brings some new features, bugfixes, breaking changes and deprecated features. - Minor Changes ------------- @@ -531,7 +550,6 @@ Release Summary This release contains a minor bugfix for the ``sns_topic`` module as well as corrections to the documentation for various modules. This is the last planned release of the 4.x series. - Bugfixes -------- @@ -889,7 +907,6 @@ Release Summary Following the release of community.aws 5.0.0, 3.6.0 is a bugfix release and the final planned release for the 3.x series. - Minor Changes ------------- diff --git a/README.md b/README.md index bcabfb701e2..8d6a693621f 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ AWS related modules and plugins supported by the Ansible Cloud team are in the [ ## Ansible version compatibility -Tested with the Ansible Core >= 2.12.0 versions, and the current development version of Ansible. Ansible Core versions before 2.12.0 are not supported. 
+Tested with Ansible Core versions >= 2.14.0, and the current development version of Ansible. Ansible Core versions before 2.14.0 are not supported. Use community.aws 4.x.y if you are using Ansible 2.9 or Ansible Core 2.10. diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 37a7d7341d1..b4e65e9ab1b 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -3734,3 +3734,24 @@ releases: - release.yml - ssm-fedora34.yml release_date: '2024-01-10' + 7.2.0: + changes: + bugfixes: + - ssm(connection) - fix bucket region logic when region is ``us-east-1`` (https://github.com/ansible-collections/community.aws/pull/1908). + minor_changes: + - glue_job - add support for 2 new instance types which are G.4X and G.8X (https://github.com/ansible-collections/community.aws/pull/2048). + - msk_cluster - Support for additional ``m5`` and ``m7g`` types of MSK clusters + (https://github.com/ansible-collections/community.aws/pull/1947). + release_summary: This release includes a new module ``dynamodb_table_info``, + new features for the ``glue_job`` and ``msk_cluster`` modules, and a bugfix + for the ``aws_ssm`` connection plugin. + fragments: + - 1908-fix_find_out_bucket_region_logic.yml + - 1947-add_support_msk_addtinal_type.yml + - 20240402-lambda-test-runtime.yml + - 2048-add-new-instance-types-in-gluejob.yaml + modules: + - description: Returns information about a Dynamo DB table + name: dynamodb_table_info + namespace: '' + release_date: '2024-04-05' diff --git a/changelogs/fragments/1908-fix_find_out_bucket_region_logic.yml b/changelogs/fragments/1908-fix_find_out_bucket_region_logic.yml deleted file mode 100644 index eea7d2a6025..00000000000 --- a/changelogs/fragments/1908-fix_find_out_bucket_region_logic.yml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - ssm(connection) - fix bucket region logic when region is ``us-east-1`` (https://github.com/ansible-collections/community.aws/pull/1908) diff --git a/changelogs/fragments/1947-add_support_msk_addtinal_type.yml b/changelogs/fragments/1947-add_support_msk_addtinal_type.yml deleted file mode 100644 index 4c55f7b78f6..00000000000 --- a/changelogs/fragments/1947-add_support_msk_addtinal_type.yml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: -- msk_cluster - Support for additional ``m5`` and ``m7g`` types of MSK clusters (https://github.com/ansible-collections/community.aws/pull/1947). \ No newline at end of file diff --git a/changelogs/fragments/20240402-lambda-test-runtime.yml b/changelogs/fragments/20240402-lambda-test-runtime.yml deleted file mode 100644 index ef86b61a42e..00000000000 --- a/changelogs/fragments/20240402-lambda-test-runtime.yml +++ /dev/null @@ -1,2 +0,0 @@ -trivial: - - integration tests - update lambda ``runtime`` parameter to python3.12 (https://github.com/ansible-collections/community.aws/pull/2065). diff --git a/changelogs/fragments/20240408-efs-sanity_fix.yml b/changelogs/fragments/20240408-efs-sanity_fix.yml new file mode 100644 index 00000000000..f8be70f183b --- /dev/null +++ b/changelogs/fragments/20240408-efs-sanity_fix.yml @@ -0,0 +1,2 @@ +trivial: + - efs - Use ``yield from`` in order to fix sanity errors.
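The ``20240408-efs-sanity_fix.yml`` fragment above refers to the ``plugins/modules/efs.py`` hunk later in this diff, where a two-line ``for``/``yield`` loop becomes ``yield from``. A minimal sketch of the pagination generator involved — illustrative only, not the module's exact code; the real function wraps the call in ``try``/``except`` error handling that is omitted here:

```python
def iterate_all(attr, map_method, **kwargs):
    """Yield every item under data[attr] across paginated API responses."""
    args = {k: v for k, v in kwargs.items() if v is not None}
    while True:
        data = map_method(**args)
        # "yield from" delegates to the sub-iterable; behaviourally identical
        # to the old "for elm in data[attr]: yield elm" loop.
        yield from data[attr]
        if "NextMarker" in data:
            args["Marker"] = data["NextMarker"]  # assumes consistent key casing
            continue
        break
```

Note that the unchanged context in ``efs.py`` reads ``data["Nextmarker"]`` while the guard checks ``"NextMarker"``; that looks like a pre-existing casing bug which this refactor leaves untouched.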
diff --git a/changelogs/fragments/2048-add-new-instance-types-in-gluejob.yaml b/changelogs/fragments/2048-add-new-instance-types-in-gluejob.yaml deleted file mode 100644 index 49119a5da37..00000000000 --- a/changelogs/fragments/2048-add-new-instance-types-in-gluejob.yaml +++ /dev/null @@ -1,2 +0,0 @@ -minor_changes: - - glue_job - add support for 2 new instance types which are G.4X and G.8X (https://github.com/ansible-collections/community.aws/pull/2048). diff --git a/changelogs/fragments/8.0.0-increase-ansible-core-version.yml b/changelogs/fragments/8.0.0-increase-ansible-core-version.yml new file mode 100644 index 00000000000..e3653bb7590 --- /dev/null +++ b/changelogs/fragments/8.0.0-increase-ansible-core-version.yml @@ -0,0 +1,2 @@ +breaking_changes: + - community.aws collection - Support for ansible-core < 2.14 has been dropped (https://github.com/ansible-collections/community.aws/pull/2074). diff --git a/changelogs/fragments/boto3_equals.yml b/changelogs/fragments/boto3_equals.yml new file mode 100644 index 00000000000..4566bc77b04 --- /dev/null +++ b/changelogs/fragments/boto3_equals.yml @@ -0,0 +1,3 @@ +trivial: +- glue_connection - stop passing ``boto3`` into ``get_ec2_security_group_ids_from_names()``; it is no longer used. +- autoscaling_launch_config - stop passing ``boto3`` into ``get_ec2_security_group_ids_from_names()``; it is no longer used. diff --git a/meta/runtime.yml b/meta/runtime.yml index 2d17de54a9a..4b577850543 100644 --- a/meta/runtime.yml +++ b/meta/runtime.yml @@ -1,5 +1,5 @@ --- -requires_ansible: ">=2.12.0" +requires_ansible: '>=2.14.0' action_groups: aws: - accessanalyzer_validate_policy_info diff --git a/plugins/modules/autoscaling_launch_config.py b/plugins/modules/autoscaling_launch_config.py index 78b7ee23352..cd411e57606 100644 --- a/plugins/modules/autoscaling_launch_config.py +++ b/plugins/modules/autoscaling_launch_config.py @@ -533,7 +533,7 @@ def create_launch_config(connection, module): module.fail_json_aws(e, msg="Failed to connect to AWS") try: security_groups = get_ec2_security_group_ids_from_names( - module.params.get("security_groups"), ec2_connection, vpc_id=vpc_id, boto3=True + module.params.get("security_groups"), ec2_connection, vpc_id=vpc_id ) except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e: module.fail_json_aws(e, msg="Failed to get Security Group IDs") diff --git a/plugins/modules/efs.py b/plugins/modules/efs.py index 6b9390f2b4d..32992c4a3c2 100644 --- a/plugins/modules/efs.py +++ b/plugins/modules/efs.py @@ -653,8 +653,7 @@ def iterate_all(attr, map_method, **kwargs): while True: try: data = map_method(**args) - for elm in data[attr]: - yield elm + yield from data[attr] if "NextMarker" in data: args["Marker"] = data["Nextmarker"] continue diff --git a/plugins/modules/glue_connection.py b/plugins/modules/glue_connection.py index 18039a8616d..ecfbd1a90dd 100644 --- a/plugins/modules/glue_connection.py +++ b/plugins/modules/glue_connection.py @@ -298,9 +298,7 @@ def create_or_update_glue_connection(connection, connection_ec2, module, glue_co params["ConnectionInput"]["PhysicalConnectionRequirements"] = dict() if module.params.get("security_groups") is not None: # Get security group IDs from names - security_group_ids = get_ec2_security_group_ids_from_names( - module.params.get("security_groups"), connection_ec2, boto3=True - ) + security_group_ids = get_ec2_security_group_ids_from_names(module.params.get("security_groups"), connection_ec2)
params["ConnectionInput"]["PhysicalConnectionRequirements"]["SecurityGroupIdList"] = security_group_ids if module.params.get("subnet_id") is not None: params["ConnectionInput"]["PhysicalConnectionRequirements"]["SubnetId"] = module.params.get("subnet_id") diff --git a/tests/integration/targets/config/tasks/main.yaml b/tests/integration/targets/config/tasks/main.yaml index 244c4b29b7b..54037080398 100644 --- a/tests/integration/targets/config/tasks/main.yaml +++ b/tests/integration/targets/config/tasks/main.yaml @@ -173,7 +173,7 @@ config_recorder: name: '{{ resource_prefix }}-recorder' state: present - role_arn: "{{ config_iam_role.arn }}" + role_arn: "{{ config_iam_role.iam_role.arn }}" recording_group: all_supported: true include_global_types: true @@ -236,7 +236,7 @@ account_sources: [] organization_source: all_aws_regions: true - role_arn: "{{ config_iam_role.arn }}" + role_arn: "{{ config_iam_role.iam_role.arn }}" register: output - name: assert success @@ -251,7 +251,7 @@ account_sources: [] organization_source: all_aws_regions: true - role_arn: "{{ config_iam_role.arn }}" + role_arn: "{{ config_iam_role.iam_role.arn }}" register: output - name: assert not changed @@ -266,7 +266,7 @@ config_recorder: name: '{{ resource_prefix }}-recorder' state: present - role_arn: "{{ config_iam_role.arn }}" + role_arn: "{{ config_iam_role.iam_role.arn }}" recording_group: all_supported: false include_global_types: false @@ -348,7 +348,7 @@ all_aws_regions: false aws_regions: - '{{ aws_region }}' - role_arn: "{{ config_iam_role.arn }}" + role_arn: "{{ config_iam_role.iam_role.arn }}" register: output - name: assert success @@ -365,7 +365,7 @@ all_aws_regions: false aws_regions: - '{{ aws_region }}' - role_arn: "{{ config_iam_role.arn }}" + role_arn: "{{ config_iam_role.iam_role.arn }}" register: output - name: assert success @@ -380,7 +380,7 @@ config_recorder: name: '{{ resource_prefix }}-recorder' state: present - role_arn: "{{ config_iam_role.arn }}" + role_arn: "{{ config_iam_role.iam_role.arn }}" recording_group: all_supported: false include_global_types: false diff --git a/tests/integration/targets/ec2_launch_template/tasks/iam_instance_role.yml b/tests/integration/targets/ec2_launch_template/tasks/iam_instance_role.yml index c26b96d69c0..ad797fabb79 100644 --- a/tests/integration/targets/ec2_launch_template/tasks/iam_instance_role.yml +++ b/tests/integration/targets/ec2_launch_template/tasks/iam_instance_role.yml @@ -29,7 +29,7 @@ - assert: that: - - 'template_with_role.default_template.launch_template_data.iam_instance_profile.arn == iam_role.arn.replace(":role/", ":instance-profile/")' + - 'template_with_role.default_template.launch_template_data.iam_instance_profile.arn == iam_role.iam_role.arn.replace(":role/", ":instance-profile/")' - name: Create template again, with no change to instance_role ec2_launch_template: @@ -41,7 +41,7 @@ - assert: that: - - 'template_with_role.default_template.launch_template_data.iam_instance_profile.arn == iam_role.arn.replace(":role/", ":instance-profile/")' + - 'template_with_role.default_template.launch_template_data.iam_instance_profile.arn == iam_role.iam_role.arn.replace(":role/", ":instance-profile/")' - 'template_with_role is not changed' - name: Update instance with new instance_role @@ -54,8 +54,8 @@ - assert: that: - - 'template_with_updated_role.default_template.launch_template_data.iam_instance_profile.arn == iam_role_2.arn.replace(":role/", ":instance-profile/")' - - 
'template_with_updated_role.default_template.launch_template_data.iam_instance_profile.arn == iam_role_2.arn.replace(":role/", ":instance-profile/")' + - 'template_with_updated_role.default_template.launch_template_data.iam_instance_profile.arn == iam_role_2.iam_role.arn.replace(":role/", ":instance-profile/")' + - 'template_with_updated_role.default_template.launch_template_data.iam_instance_profile.arn == iam_role_2.iam_role.arn.replace(":role/", ":instance-profile/")' - 'template_with_role.default_template.version_number < template_with_updated_role.default_template.version_number' - 'template_with_updated_role is changed' - 'template_with_updated_role is not failed' @@ -71,7 +71,7 @@ - assert: that: - 'template_with_updated_role is not changed' - - 'template_with_updated_role.default_template.launch_template_data.iam_instance_profile.arn == iam_role_2.arn.replace(":role/", ":instance-profile/")' + - 'template_with_updated_role.default_template.launch_template_data.iam_instance_profile.arn == iam_role_2.iam_role.arn.replace(":role/", ":instance-profile/")' - name: Update instance with original instance_role (pass profile ARN) ec2_launch_template: @@ -79,13 +79,13 @@ image_id: "{{ ec2_ami_id }}" instance_type: t2.micro # By default an instance profile will be created with the same name as the role - iam_instance_profile: '{{ iam_role.arn.replace(":role/", ":instance-profile/") }}' + iam_instance_profile: '{{ iam_role.iam_role.arn.replace(":role/", ":instance-profile/") }}' register: template_with_updated_role - assert: that: - - 'template_with_updated_role.default_template.launch_template_data.iam_instance_profile.arn == iam_role.arn.replace(":role/", ":instance-profile/")' - - 'template_with_updated_role.default_template.launch_template_data.iam_instance_profile.arn == iam_role.arn.replace(":role/", ":instance-profile/")' + - 'template_with_updated_role.default_template.launch_template_data.iam_instance_profile.arn == iam_role.iam_role.arn.replace(":role/", ":instance-profile/")' + - 'template_with_updated_role.default_template.launch_template_data.iam_instance_profile.arn == iam_role.iam_role.arn.replace(":role/", ":instance-profile/")' - 'template_with_role.default_template.version_number < template_with_updated_role.default_template.version_number' - 'template_with_updated_role is changed' - 'template_with_updated_role is not failed' @@ -95,13 +95,13 @@ name: "{{ resource_prefix }}-test-instance-role" image_id: "{{ ec2_ami_id }}" instance_type: t2.micro - iam_instance_profile: '{{ iam_role.arn.replace(":role/", ":instance-profile/") }}' + iam_instance_profile: '{{ iam_role.iam_role.arn.replace(":role/", ":instance-profile/") }}' register: template_with_updated_role - assert: that: - 'template_with_updated_role is not changed' - - 'template_with_updated_role.default_template.launch_template_data.iam_instance_profile.arn == iam_role.arn.replace(":role/", ":instance-profile/")' + - 'template_with_updated_role.default_template.launch_template_data.iam_instance_profile.arn == iam_role.iam_role.arn.replace(":role/", ":instance-profile/")' always: - name: delete launch template diff --git a/tests/integration/targets/ecs_cluster/tasks/20_ecs_service.yml b/tests/integration/targets/ecs_cluster/tasks/20_ecs_service.yml index 3c4bbcb28c7..e04cfe6acd5 100644 --- a/tests/integration/targets/ecs_cluster/tasks/20_ecs_service.yml +++ b/tests/integration/targets/ecs_cluster/tasks/20_ecs_service.yml @@ -547,7 +547,7 @@ >> "rolloutStateReason": "ECS deployment ecs-svc/5156684577543126023 in progress.", 
constraints and placement strategies are only changeable if the rollout state is "COMPLETED" - + a) ecs_service currently has no waiter function, so this is a DIY waiter b) the state never reached "COMPLETED" because something is wrong with the ECS EC2 Instances or the network setup. The EC2 instance never arrived as an active instance in the cluster. @@ -555,9 +555,9 @@ >> no container instance met all of its requirements. Reason: No Container Instances were found in your cluster. >> For more information, see the Troubleshooting section of the Amazon ECS Developer Guide. >> ec2_instance networking does not work correctly, no instance available for the cluster - + Because of all this, all following tasks that test the change of a constraint or placement strategy are - using `force_new_deployment: true`. That ignores a) and b). + using `force_new_deployment: true`. That ignores a) and b). ignore_errors: true ecs_service_info: name: "{{ ecs_service_name }}-constraint" @@ -736,7 +736,7 @@ launch_type: FARGATE cpu: 512 memory: 1024 - execution_role_arn: "{{ iam_execution_role.arn }}" + execution_role_arn: "{{ iam_execution_role.iam_role.arn }}" state: present vars: ecs_task_host_port: 8080 @@ -750,7 +750,7 @@ launch_type: EC2 cpu: 512 memory: 1024 - execution_role_arn: "{{ iam_execution_role.arn }}" + execution_role_arn: "{{ iam_execution_role.iam_role.arn }}" state: present vars: ecs_task_host_port: 8080 @@ -916,7 +916,7 @@ launch_type: FARGATE cpu: 512 memory: 1024 - execution_role_arn: "{{ iam_execution_role.arn }}" + execution_role_arn: "{{ iam_execution_role.iam_role.arn }}" state: present runtime_platform: cpuArchitecture: "ARM64" @@ -938,7 +938,7 @@ launch_type: FARGATE cpu: 512 memory: 1024 - execution_role_arn: "{{ iam_execution_role.arn }}" + execution_role_arn: "{{ iam_execution_role.iam_role.arn }}" state: present runtime_platform: cpuArchitecture: "ARM64" @@ -960,7 +960,7 @@ launch_type: FARGATE cpu: 512 memory: 1024 - execution_role_arn: "{{ iam_execution_role.arn }}" + execution_role_arn: "{{ iam_execution_role.iam_role.arn }}" state: present runtime_platform: cpuArchitecture: "ARM64" diff --git a/tests/integration/targets/eks_cluster/tasks/full_test.yml b/tests/integration/targets/eks_cluster/tasks/full_test.yml index 71cc1fc87e5..d3f7dfbe6fd 100644 --- a/tests/integration/targets/eks_cluster/tasks/full_test.yml +++ b/tests/integration/targets/eks_cluster/tasks/full_test.yml @@ -79,7 +79,7 @@ name: "{{ eks_cluster_name }}" security_groups: "{{ eks_security_groups | map(attribute='name') }}" subnets: "{{ setup_subnets.results | map(attribute='subnet.id') }}" - role_arn: "{{ iam_role.arn }}" + role_arn: "{{ iam_role.iam_role.arn }}" tags: Name: "{{ resource_prefix }}" another: foobar @@ -97,7 +97,7 @@ name: "{{ eks_cluster_name }}" security_groups: "{{ eks_security_groups | map(attribute='name') }}" subnets: "{{ setup_subnets.results | map(attribute='subnet.id') }}" - role_arn: "{{ iam_role.arn }}" + role_arn: "{{ iam_role.iam_role.arn }}" wait: yes register: eks_create @@ -117,7 +117,7 @@ name: "{{ eks_cluster_name }}" security_groups: "{{ setup_security_groups.results | map(attribute='group_id') }}" subnets: "{{ setup_subnets.results | map(attribute='subnet.id') }}" - role_arn: "{{ iam_role.arn }}" + role_arn: "{{ iam_role.iam_role.arn }}" register: eks_create - name: check that EKS cluster did not change @@ -143,7 +143,7 @@ name: "{{ eks_cluster_name }}" security_groups: "{{ eks_security_groups | map(attribute='name') }}" subnets: "{{ setup_subnets.results | 
map(attribute='subnet.id') }}" - role_arn: "{{ iam_role.arn }}" + role_arn: "{{ iam_role.iam_role.arn }}" wait: yes register: eks_create @@ -169,7 +169,7 @@ name: "{{ eks_cluster_short_name }}" security_groups: "{{ eks_security_groups | map(attribute='name') }}" subnets: "{{ setup_subnets.results | map(attribute='subnet.id') }}" - role_arn: "{{ iam_role.arn }}" + role_arn: "{{ iam_role.iam_role.arn }}" register: eks_create - name: check that EKS cluster was created with short name diff --git a/tests/integration/targets/eks_fargate_profile/tasks/create_eks_cluster.yml b/tests/integration/targets/eks_fargate_profile/tasks/create_eks_cluster.yml index 48fbbef8017..1402ad0a137 100644 --- a/tests/integration/targets/eks_fargate_profile/tasks/create_eks_cluster.yml +++ b/tests/integration/targets/eks_fargate_profile/tasks/create_eks_cluster.yml @@ -87,7 +87,7 @@ name: '{{ eks_cluster_name }}' security_groups: '{{ eks_security_groups | map(attribute=''name'') }}' subnets: '{{ setup_subnets.results | map(attribute=''subnet.id'') }}' - role_arn: '{{ iam_role.arn }}' + role_arn: '{{ iam_role.iam_role.arn }}' wait: true register: eks_create diff --git a/tests/integration/targets/eks_fargate_profile/tasks/full_test.yml b/tests/integration/targets/eks_fargate_profile/tasks/full_test.yml index b992125b334..6b513b9b59b 100644 --- a/tests/integration/targets/eks_fargate_profile/tasks/full_test.yml +++ b/tests/integration/targets/eks_fargate_profile/tasks/full_test.yml @@ -1,5 +1,5 @@ # Creating dependencies -- name: create IAM instance role +- name: create IAM instance role iam_role: name: 'ansible-test-aws_eks_fargate_profile' assume_role_policy_document: '{{ lookup(''file'',''eks-fargate-profile-trust-policy.json'') }}' @@ -19,7 +19,7 @@ name: '{{ eks_fargate_profile_name_a }}' state: present cluster_name: fake_cluster - role_arn: '{{ iam_role_fargate.arn }}' + role_arn: '{{ iam_role_fargate.iam_role.arn }}' subnets: >- {{setup_subnets.results|selectattr('subnet.tags.Name', 'contains', 'private') | map(attribute='subnet.id') }} @@ -38,7 +38,7 @@ name: fake_profile cluster_name: '{{ eks_cluster_name }}' state: absent - role_arn: '{{ iam_role_fargate.arn }}' + role_arn: '{{ iam_role_fargate.iam_role.arn }}' subnets: >- {{setup_subnets.results|selectattr('subnet.tags.Name', 'contains', 'private') | map(attribute='subnet.id') }} @@ -57,7 +57,7 @@ name: '{{ eks_fargate_profile_name_a }}' state: present cluster_name: '{{ eks_cluster_name }}' - role_arn: '{{ iam_role_fargate.arn }}' + role_arn: '{{ iam_role_fargate.iam_role.arn }}' subnets: >- {{setup_subnets.results|selectattr('subnet.tags.Name', 'contains', 'public') | map(attribute='subnet.id') }} @@ -72,13 +72,13 @@ - not eks_fargate_profile_create.changed - eks_fargate_profile_create.msg.endswith("provided in Fargate Profile is not a private subnet") -# Create Fargate_profile with wait +# Create Fargate_profile with wait - name: create Fargate Profile with wait (check mode) eks_fargate_profile: name: '{{ eks_fargate_profile_name_a }}' state: present cluster_name: '{{ eks_cluster_name }}' - role_arn: '{{ iam_role_fargate.arn }}' + role_arn: '{{ iam_role_fargate.iam_role.arn }}' subnets: >- {{setup_subnets.results|selectattr('subnet.tags.Name', 'contains', 'private') | map(attribute='subnet.id') }} @@ -99,7 +99,7 @@ name: '{{ eks_fargate_profile_name_a }}' state: present cluster_name: '{{ eks_cluster_name }}' - role_arn: '{{ iam_role_fargate.arn }}' + role_arn: '{{ iam_role_fargate.iam_role.arn }}' subnets: >- 
{{setup_subnets.results|selectattr('subnet.tags.Name', 'contains', 'private') | map(attribute='subnet.id') }} @@ -120,7 +120,7 @@ name: '{{ eks_fargate_profile_name_a }}' state: present cluster_name: '{{ eks_cluster_name }}' - role_arn: '{{ iam_role_fargate.arn }}' + role_arn: '{{ iam_role_fargate.iam_role.arn }}' subnets: >- {{setup_subnets.results|selectattr('subnet.tags.Name', 'contains', 'private') | map(attribute='subnet.id') }} @@ -141,7 +141,7 @@ name: '{{ eks_fargate_profile_name_a }}' state: present cluster_name: '{{ eks_cluster_name }}' - role_arn: '{{ iam_role_fargate.arn }}' + role_arn: '{{ iam_role_fargate.iam_role.arn }}' subnets: >- {{setup_subnets.results|selectattr('subnet.tags.Name', 'contains', 'private') | map(attribute='subnet.id') }} @@ -162,7 +162,7 @@ name: '{{ eks_fargate_profile_name_a }}' state: present cluster_name: '{{ eks_cluster_name }}' - role_arn: '{{ iam_role_fargate.arn }}' + role_arn: '{{ iam_role_fargate.iam_role.arn }}' subnets: >- {{setup_subnets.results|selectattr('subnet.tags.Name', 'contains', 'private') | map(attribute='subnet.id') }} @@ -185,7 +185,7 @@ name: '{{ eks_fargate_profile_name_a }}' state: present cluster_name: '{{ eks_cluster_name }}' - role_arn: '{{ iam_role_fargate.arn }}' + role_arn: '{{ iam_role_fargate.iam_role.arn }}' subnets: >- {{setup_subnets.results|selectattr('subnet.tags.Name', 'contains', 'private') | map(attribute='subnet.id') }} @@ -207,7 +207,7 @@ name: '{{ eks_fargate_profile_name_a }}' state: present cluster_name: '{{ eks_cluster_name }}' - role_arn: '{{ iam_role_fargate.arn }}' + role_arn: '{{ iam_role_fargate.iam_role.arn }}' subnets: >- {{setup_subnets.results|selectattr('subnet.tags.Name', 'contains', 'private') | map(attribute='subnet.id') }} @@ -230,7 +230,7 @@ name: '{{ eks_fargate_profile_name_a }}' state: present cluster_name: '{{ eks_cluster_name }}' - role_arn: '{{ iam_role_fargate.arn }}' + role_arn: '{{ iam_role_fargate.iam_role.arn }}' subnets: >- {{setup_subnets.results|selectattr('subnet.tags.Name', 'contains', 'private') | map(attribute='subnet.id') }} @@ -253,7 +253,7 @@ name: '{{ eks_fargate_profile_name_b }}' state: present cluster_name: '{{ eks_cluster_name }}' - role_arn: '{{ iam_role_fargate.arn }}' + role_arn: '{{ iam_role_fargate.iam_role.arn }}' subnets: >- {{setup_subnets.results|selectattr('subnet.tags.Name', 'contains', 'private') | map(attribute='subnet.id') }} @@ -272,7 +272,7 @@ name: '{{ eks_fargate_profile_name_b }}' state: present cluster_name: '{{ eks_cluster_name }}' - role_arn: '{{ iam_role_fargate.arn }}' + role_arn: '{{ iam_role_fargate.iam_role.arn }}' subnets: >- {{setup_subnets.results|selectattr('subnet.tags.Name', 'contains', 'private') | map(attribute='subnet.id') }} @@ -291,7 +291,7 @@ name: '{{ eks_fargate_profile_name_b }}' state: present cluster_name: '{{ eks_cluster_name }}' - role_arn: '{{ iam_role_fargate.arn }}' + role_arn: '{{ iam_role_fargate.iam_role.arn }}' subnets: >- {{setup_subnets.results|selectattr('subnet.tags.Name', 'contains', 'private') | map(attribute='subnet.id') }} @@ -310,7 +310,7 @@ name: '{{ eks_fargate_profile_name_b }}' state: present cluster_name: '{{ eks_cluster_name }}' - role_arn: '{{ iam_role_fargate.arn }}' + role_arn: '{{ iam_role_fargate.iam_role.arn }}' subnets: >- {{setup_subnets.results|selectattr('subnet.tags.Name', 'contains', 'private') | map(attribute='subnet.id') }} @@ -389,7 +389,7 @@ that: - eks_fargate_profile_b_delete.changed -- name: delete a fargate profile b +- name: delete a fargate profile b eks_fargate_profile: name: 
'{{ eks_fargate_profile_name_b }}' cluster_name: '{{ eks_cluster_name }}' @@ -426,4 +426,4 @@ - name: check that eks_fargate_profile did nothing (idempotency) assert: that: - - not eks_fargate_profile_b_delete.changed \ No newline at end of file + - not eks_fargate_profile_b_delete.changed diff --git a/tests/integration/targets/eks_nodegroup/tasks/dependecies.yml b/tests/integration/targets/eks_nodegroup/tasks/dependecies.yml index 882d45dd7af..cd37239c4a7 100644 --- a/tests/integration/targets/eks_nodegroup/tasks/dependecies.yml +++ b/tests/integration/targets/eks_nodegroup/tasks/dependecies.yml @@ -67,7 +67,7 @@ name: '{{ eks_cluster_name }}' security_groups: '{{ eks_security_groups | map(attribute=''name'') }}' subnets: '{{ setup_subnets.results | map(attribute=''subnet.id'') }}' - role_arn: '{{ iam_role.arn }}' + role_arn: '{{ iam_role.iam_role.arn }}' wait: true register: eks_create diff --git a/tests/integration/targets/eks_nodegroup/tasks/full_test.yml b/tests/integration/targets/eks_nodegroup/tasks/full_test.yml index 9accc8e8f29..cb1d27340df 100644 --- a/tests/integration/targets/eks_nodegroup/tasks/full_test.yml +++ b/tests/integration/targets/eks_nodegroup/tasks/full_test.yml @@ -4,7 +4,7 @@ name: '{{ eks_nodegroup_name_a }}' state: present cluster_name: fake_cluster - node_role: '{{ iam_role_eks_nodegroup.arn }}' + node_role: '{{ iam_role_eks_nodegroup.iam_role.arn }}' subnets: '{{ setup_subnets.results | map(attribute=''subnet.id'') }}' scaling_config: min_size: 1 @@ -38,10 +38,10 @@ name: '{{ eks_nodegroup_name_a }}' state: present cluster_name: '{{ eks_cluster_name }}' - node_role: '{{ iam_role_eks_nodegroup.arn }}' + node_role: '{{ iam_role_eks_nodegroup.iam_role.arn }}' subnets: '{{ setup_subnets.results | map(attribute=''subnet.id'') }}' wait: True - launch_template: + launch_template: id: 'lt-0824c69cafa69ac81' disk_size: 30 register: eks_nodegroup_result @@ -60,18 +60,18 @@ name: '{{ eks_nodegroup_name_a }}' state: present cluster_name: '{{ eks_cluster_name }}' - node_role: '{{ iam_role_eks_nodegroup.arn }}' + node_role: '{{ iam_role_eks_nodegroup.iam_role.arn }}' subnets: '{{ setup_subnets.results | map(attribute=''subnet.id'') }}' scaling_config: min_size: 1 max_size: 3 desired_size: 2 - disk_size: 30 + disk_size: 30 instance_types: ['t3.small'] ami_type: 'AL2_x86_64' update_config: max_unavailable_percentage: 50 - labels: + labels: 'env': 'test' taints: - key: 'env' @@ -98,18 +98,18 @@ name: '{{ eks_nodegroup_name_a }}' state: present cluster_name: '{{ eks_cluster_name }}' - node_role: '{{ iam_role_eks_nodegroup.arn }}' + node_role: '{{ iam_role_eks_nodegroup.iam_role.arn }}' subnets: '{{ setup_subnets.results | map(attribute=''subnet.id'') }}' scaling_config: min_size: 1 max_size: 3 desired_size: 2 - disk_size: 30 + disk_size: 30 instance_types: ['t3.small'] ami_type: 'AL2_x86_64' update_config: max_unavailable_percentage: 50 - labels: + labels: 'env': 'test' taints: - key: 'env' @@ -135,18 +135,18 @@ name: '{{ eks_nodegroup_name_a }}' state: present cluster_name: '{{ eks_cluster_name }}' - node_role: '{{ iam_role_eks_nodegroup.arn }}' + node_role: '{{ iam_role_eks_nodegroup.iam_role.arn }}' subnets: '{{ setup_subnets.results | map(attribute=''subnet.id'') }}' scaling_config: min_size: 1 max_size: 3 desired_size: 2 - disk_size: 30 + disk_size: 30 instance_types: ['t3.small'] ami_type: 'AL2_x86_64' update_config: max_unavailable_percentage: 50 - labels: + labels: 'env': 'test' taints: - key: 'env' @@ -173,18 +173,18 @@ name: '{{ eks_nodegroup_name_a }}' state: 
present cluster_name: '{{ eks_cluster_name }}' - node_role: '{{ iam_role_eks_nodegroup.arn }}' + node_role: '{{ iam_role_eks_nodegroup.iam_role.arn }}' subnets: '{{ setup_subnets.results | map(attribute=''subnet.id'') }}' scaling_config: min_size: 1 max_size: 3 desired_size: 2 - disk_size: 30 + disk_size: 30 instance_types: ['t3.small'] ami_type: 'AL2_x86_64' update_config: max_unavailable_percentage: 50 - labels: + labels: 'env': 'test' taints: - key: 'env' @@ -214,7 +214,7 @@ name: '{{ eks_nodegroup_name_a }}' state: present cluster_name: '{{ eks_cluster_name }}' - node_role: '{{ iam_role_eks_nodegroup.arn }}' + node_role: '{{ iam_role_eks_nodegroup.iam_role.arn }}' subnets: '{{ setup_subnets.results | map(attribute=''subnet.id'') }}' wait: True disk_size: 40 @@ -231,7 +231,7 @@ name: '{{ eks_nodegroup_name_a }}' state: present cluster_name: '{{ eks_cluster_name }}' - node_role: '{{ iam_role_eks_nodegroup.arn }}' + node_role: '{{ iam_role_eks_nodegroup.iam_role.arn }}' subnets: '{{ setup_subnets.results | map(attribute=''subnet.id'') }}' wait: True instance_types: ['t3.small'] @@ -251,18 +251,18 @@ name: '{{ eks_nodegroup_name_a }}' state: present cluster_name: '{{ eks_cluster_name }}' - node_role: '{{ iam_role_eks_nodegroup.arn }}' + node_role: '{{ iam_role_eks_nodegroup.iam_role.arn }}' subnets: '{{ setup_subnets.results | map(attribute=''subnet.id'') }}' scaling_config: min_size: 1 max_size: 4 desired_size: 2 - disk_size: 30 + disk_size: 30 instance_types: ['t3.small'] ami_type: 'AL2_x86_64' update_config: max_unavailable_percentage: 50 - labels: + labels: 'env': 'changeit' taints: - key: 'env' @@ -289,18 +289,18 @@ name: '{{ eks_nodegroup_name_a }}' state: present cluster_name: '{{ eks_cluster_name }}' - node_role: '{{ iam_role_eks_nodegroup.arn }}' + node_role: '{{ iam_role_eks_nodegroup.iam_role.arn }}' subnets: '{{ setup_subnets.results | map(attribute=''subnet.id'') }}' scaling_config: min_size: 1 max_size: 4 desired_size: 2 - disk_size: 30 + disk_size: 30 instance_types: ['t3.small'] ami_type: 'AL2_x86_64' update_config: max_unavailable_percentage: 50 - labels: + labels: 'env': 'changeit' taints: - key: 'env' @@ -326,18 +326,18 @@ name: '{{ eks_nodegroup_name_a }}' state: present cluster_name: '{{ eks_cluster_name }}' - node_role: '{{ iam_role_eks_nodegroup.arn }}' + node_role: '{{ iam_role_eks_nodegroup.iam_role.arn }}' subnets: '{{ setup_subnets.results | map(attribute=''subnet.id'') }}' scaling_config: min_size: 1 max_size: 4 desired_size: 2 - disk_size: 30 + disk_size: 30 instance_types: ['t3.small'] ami_type: 'AL2_x86_64' update_config: max_unavailable_percentage: 50 - labels: + labels: 'env': 'changeit' taints: - key: 'env' @@ -364,18 +364,18 @@ name: '{{ eks_nodegroup_name_a }}' state: present cluster_name: '{{ eks_cluster_name }}' - node_role: '{{ iam_role_eks_nodegroup.arn }}' + node_role: '{{ iam_role_eks_nodegroup.iam_role.arn }}' subnets: '{{ setup_subnets.results | map(attribute=''subnet.id'') }}' scaling_config: min_size: 1 max_size: 4 desired_size: 2 - disk_size: 30 + disk_size: 30 instance_types: ['t3.small'] ami_type: 'AL2_x86_64' update_config: max_unavailable_percentage: 50 - labels: + labels: 'env': 'changeit' taints: - key: 'env' @@ -405,7 +405,7 @@ name: '{{ eks_nodegroup_name_a }}' state: absent cluster_name: '{{ eks_cluster_name }}' - node_role: '{{ iam_role_eks_nodegroup.arn }}' + node_role: '{{ iam_role_eks_nodegroup.iam_role.arn }}' register: eks_nodegroup_result check_mode: True @@ -459,7 +459,7 @@ name: '{{ eks_nodegroup_name_lt }}' state: present 
cluster_name: '{{ eks_cluster_name }}' - node_role: '{{ iam_role_eks_nodegroup.arn }}' + node_role: '{{ iam_role_eks_nodegroup.iam_role.arn }}' subnets: '{{ setup_subnets.results | map(attribute=''subnet.id'') }}' launch_template: name: '{{ lt.template.launch_template_name }}' @@ -477,7 +477,7 @@ name: '{{ eks_nodegroup_name_lt }}' state: present cluster_name: '{{ eks_cluster_name }}' - node_role: '{{ iam_role_eks_nodegroup.arn }}' + node_role: '{{ iam_role_eks_nodegroup.iam_role.arn }}' subnets: '{{ setup_subnets.results | map(attribute=''subnet.id'') }}' launch_template: name: '{{ lt.template.launch_template_name }}' @@ -494,7 +494,7 @@ name: '{{ eks_nodegroup_name_lt }}' state: present cluster_name: '{{ eks_cluster_name }}' - node_role: '{{ iam_role_eks_nodegroup.arn }}' + node_role: '{{ iam_role_eks_nodegroup.iam_role.arn }}' subnets: '{{ setup_subnets.results | map(attribute=''subnet.id'') }}' launch_template: name: '{{ lt.template.launch_template_name }}' @@ -512,7 +512,7 @@ name: '{{ eks_nodegroup_name_lt }}' state: present cluster_name: '{{ eks_cluster_name }}' - node_role: '{{ iam_role_eks_nodegroup.arn }}' + node_role: '{{ iam_role_eks_nodegroup.iam_role.arn }}' subnets: '{{ setup_subnets.results | map(attribute=''subnet.id'') }}' launch_template: name: '{{ lt.template.launch_template_name }}' @@ -533,7 +533,7 @@ name: '{{ eks_nodegroup_name_lt }}' state: absent cluster_name: '{{ eks_cluster_name }}' - node_role: '{{ iam_role_eks_nodegroup.arn }}' + node_role: '{{ iam_role_eks_nodegroup.iam_role.arn }}' wait: True register: eks_nodegroup_result check_mode: True diff --git a/tests/integration/targets/elb_target/tasks/lambda_target.yml b/tests/integration/targets/elb_target/tasks/lambda_target.yml index c4271cdd655..7e6b54cef5d 100644 --- a/tests/integration/targets/elb_target/tasks/lambda_target.yml +++ b/tests/integration/targets/elb_target/tasks/lambda_target.yml @@ -24,7 +24,7 @@ state: present zip_file: /tmp/lambda.zip runtime: python3.12 - role: "{{ ROLE_ARN.arn }}" + role: "{{ ROLE_ARN.iam_role.arn }}" handler: ansible_lambda_target.lambda_handler timeout: 30 register: lambda_function
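The bulk of the integration-test churn in this diff is mechanical: as the systematic ``iam_role.arn`` to ``iam_role.iam_role.arn`` edits imply, the amazon.aws ``iam_role`` module now nests its return values under an ``iam_role`` key instead of exposing them at the top level, so every registered result must be dereferenced one level deeper (likewise for ``role_arn``, ``node_role``, ``execution_role_arn``, and the Lambda ``role`` parameter). A small sketch of the lookup change, with a hypothetical ARN for illustration:

```python
# Shape of a registered iam_role result, before and after the amazon.aws
# return-value change this diff adapts to (ARN is illustrative only).
old_result = {"arn": "arn:aws:iam::123456789012:role/ansible-test-role"}
new_result = {"iam_role": {"arn": "arn:aws:iam::123456789012:role/ansible-test-role"}}

# The Jinja2 expression "{{ iam_role.arn }}" therefore becomes
# "{{ iam_role.iam_role.arn }}" throughout the test targets.
assert new_result["iam_role"]["arn"] == old_result["arn"]
```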