diff --git a/changelogs/fragments/331_aws_ec2_inventory_use_contrib_script_compatible_ec2_tag_keys.yaml b/changelogs/fragments/331_aws_ec2_inventory_use_contrib_script_compatible_ec2_tag_keys.yaml new file mode 100644 index 00000000000..0835e16cd7f --- /dev/null +++ b/changelogs/fragments/331_aws_ec2_inventory_use_contrib_script_compatible_ec2_tag_keys.yaml @@ -0,0 +1,3 @@ +--- +minor_changes: +- "aws_ec2 inventory - expose a new configuration key ``use_contrib_script_compatible_ec2_tag_keys`` to reproduce a behavior of the old ``ec2.py`` inventory script. With this option enabled, each tag is exposed using an ``ec2_tag_TAGNAME`` key (https://github.com/ansible-collections/amazon.aws/pull/331)." diff --git a/plugins/inventory/aws_ec2.py b/plugins/inventory/aws_ec2.py index b5d92ad4d1a..26a9e8bc107 100644 --- a/plugins/inventory/aws_ec2.py +++ b/plugins/inventory/aws_ec2.py @@ -76,6 +76,12 @@ which group names end up being used as. type: bool default: False + use_contrib_script_compatible_ec2_tag_keys: + description: + - Expose the host tags with ec2_tag_TAGNAME keys like the old ec2.py inventory script. + - The use of this feature is discouraged and we advise migrating to the new ``tags`` structure. 
+ type: bool + default: False ''' EXAMPLES = ''' @@ -564,6 +570,10 @@ def _add_hosts(self, hosts, group, hostnames): host = camel_dict_to_snake_dict(host, ignore_list=['Tags']) host['tags'] = boto3_tag_list_to_ansible_dict(host.get('tags', [])) + if self.get_option('use_contrib_script_compatible_ec2_tag_keys'): + for k, v in host['tags'].items(): + host["ec2_tag_%s" % k] = v + # Allow easier grouping by region host['placement']['region'] = host['placement']['availability_zone'][:-1] diff --git a/tests/integration/targets/inventory_aws_ec2/playbooks/test_populating_inventory_with_use_contrib_script_keys.yml b/tests/integration/targets/inventory_aws_ec2/playbooks/test_populating_inventory_with_use_contrib_script_keys.yml new file mode 100644 index 00000000000..73a26bb9b85 --- /dev/null +++ b/tests/integration/targets/inventory_aws_ec2/playbooks/test_populating_inventory_with_use_contrib_script_keys.yml @@ -0,0 +1,62 @@ +--- +- hosts: 127.0.0.1 + connection: local + gather_facts: no + environment: "{{ ansible_test.environment }}" + tasks: + + - module_defaults: + group/aws: + aws_access_key: '{{ aws_access_key }}' + aws_secret_key: '{{ aws_secret_key }}' + security_token: '{{ security_token | default(omit) }}' + region: '{{ aws_region }}' + block: + + # Create VPC, subnet, security group, and find image_id to create instance + - include_tasks: setup.yml + + # Create new host, refresh inventory + - name: create a new host + ec2: + image: '{{ image_id }}' + exact_count: 1 + count_tag: + Name: '{{ resource_prefix }}:/aa' + instance_tags: + Name: '{{ resource_prefix }}:/aa' + OtherTag: value + instance_type: t2.micro + wait: yes + group_id: '{{ sg_id }}' + vpc_subnet_id: '{{ subnet_id }}' + register: setup_instance + + - meta: refresh_inventory + + - name: "register the current hostname, the : and / are replaced with _" + set_fact: + expected_hostname: "{{ resource_prefix }}__aa" + + - name: "Ensure we've got a hostvars entry for the new host" + assert: + that: + - 
expected_hostname in hostvars + - hostvars[expected_hostname].ec2_tag_OtherTag == "value" + + always: + + - name: remove setup ec2 instance + ec2: + instance_type: t2.micro + instance_ids: '{{ setup_instance.instance_ids }}' + state: absent + wait: yes + instance_tags: + Name: '{{ resource_prefix }}' + group_id: "{{ sg_id }}" + vpc_subnet_id: "{{ subnet_id }}" + ignore_errors: yes + when: setup_instance is defined + + - include_tasks: tear_down.yml diff --git a/tests/integration/targets/inventory_aws_ec2/runme.sh b/tests/integration/targets/inventory_aws_ec2/runme.sh index 9b9a54d8165..38e48fa090f 100755 --- a/tests/integration/targets/inventory_aws_ec2/runme.sh +++ b/tests/integration/targets/inventory_aws_ec2/runme.sh @@ -39,5 +39,9 @@ ansible-playbook playbooks/test_populating_inventory_with_constructed.yml "$@" ansible-playbook playbooks/create_inventory_config.yml -e "template='inventory_with_concatenation.yml.j2'" "$@" ansible-playbook playbooks/test_populating_inventory_with_concatenation.yml "$@" +# generate inventory config with use_contrib_script_compatible_ec2_tag_keys and test using it +ansible-playbook playbooks/create_inventory_config.yml -e "template='inventory_with_use_contrib_script_keys.yml.j2'" "$@" +ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS=never ansible-playbook playbooks/test_populating_inventory_with_use_contrib_script_keys.yml "$@" + # cleanup inventory config ansible-playbook playbooks/empty_inventory_config.yml "$@" diff --git a/tests/integration/targets/inventory_aws_ec2/templates/inventory_with_use_contrib_script_keys.yml.j2 b/tests/integration/targets/inventory_aws_ec2/templates/inventory_with_use_contrib_script_keys.yml.j2 new file mode 100644 index 00000000000..e6b4068fa48 --- /dev/null +++ b/tests/integration/targets/inventory_aws_ec2/templates/inventory_with_use_contrib_script_keys.yml.j2 @@ -0,0 +1,15 @@ +plugin: amazon.aws.aws_ec2 +aws_access_key_id: '{{ aws_access_key }}' +aws_secret_access_key: '{{ aws_secret_key }}' +{% if security_token | default(false) %} 
+aws_security_token: '{{ security_token }}' +{% endif %} +regions: +- '{{ aws_region }}' +filters: + tag:Name: + - '{{ resource_prefix }}:/aa' +hostnames: +- tag:Name +use_contrib_script_compatible_sanitization: True +use_contrib_script_compatible_ec2_tag_keys: True