lambda - Add support for setting supported architectures #1377

Merged
2 changes: 2 additions & 0 deletions changelogs/fragments/744-lambda-architectures.yml
@@ -0,0 +1,2 @@
minor_changes:
  - lambda - Adds support for setting Architectures (https://github.com/ansible-collections/community.aws/issues/744).
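
For context, a task using the new option might look like the sketch below; the function name, role ARN, runtime, and zip path are illustrative placeholders rather than values taken from this PR.

- name: Create a Lambda function targeting arm64 (Graviton)
  community.aws.lambda:
    name: example-arm64-function                  # placeholder name
    state: present
    runtime: python3.9                            # assumed runtime
    handler: lambda_function.lambda_handler
    role: arn:aws:iam::123456789012:role/example-lambda-role   # placeholder ARN
    zip_file: /tmp/lambda.zip                     # architecture requires zip_file or s3_bucket
    architecture: arm64
  register: arm64_function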
160 changes: 105 additions & 55 deletions plugins/modules/lambda.py
@@ -12,7 +12,7 @@
version_added: 1.0.0
short_description: Manage AWS Lambda functions
description:
- Allows for the management of Lambda functions.
- Allows for the management of Lambda functions.
options:
name:
description:
@@ -108,13 +108,21 @@
- The KMS key ARN used to encrypt the function's environment variables.
type: str
version_added: 3.3.0
architecture:
description:
- The instruction set architecture that the function supports.
- Requires one of I(s3_bucket) or I(zip_file).
- Requires botocore >= 1.21.51.
type: str
choices: ['x86_64', 'arm64']
aliases: ['architectures']
version_added: 5.0.0
author:
- 'Steyn Huizinga (@steynovich)'
- 'Steyn Huizinga (@steynovich)'
extends_documentation_fragment:
- amazon.aws.aws
- amazon.aws.ec2
- amazon.aws.tags

- amazon.aws.aws
- amazon.aws.ec2
- amazon.aws.tags
'''

EXAMPLES = r'''
@@ -192,6 +200,12 @@
returned: success
type: dict
contains:
architectures:
description: The architectures supported by the function.
returned: successful run where botocore >= 1.21.51
type: list
elements: str
sample: ['arm64']
code_sha256:
description: The SHA256 hash of the function's deployment package.
returned: success
@@ -452,6 +466,66 @@ def format_response(response):
return result


def _zip_args(zip_file, current_config, ignore_checksum):
if not zip_file:
return {}

# If there's another change that needs to happen, we always re-upload the code
if not ignore_checksum:
local_checksum = sha256sum(zip_file)
remote_checksum = current_config.get('CodeSha256', '')
if local_checksum == remote_checksum:
return {}

with open(zip_file, 'rb') as f:
zip_content = f.read()
return {'ZipFile': zip_content}


def _s3_args(s3_bucket, s3_key, s3_object_version):
if not s3_bucket:
return {}
if not s3_key:
return {}

code = {'S3Bucket': s3_bucket,
'S3Key': s3_key}
if s3_object_version:
code.update({'S3ObjectVersion': s3_object_version})

return code


def _code_args(module, current_config):
s3_bucket = module.params.get('s3_bucket')
s3_key = module.params.get('s3_key')
s3_object_version = module.params.get('s3_object_version')
zip_file = module.params.get('zip_file')
architectures = module.params.get('architecture')
checksum_match = False

code_kwargs = {}

if architectures and current_config.get('Architectures', None) != [architectures]:
module.warn('Arch Change')
code_kwargs.update({'Architectures': [architectures]})

try:
code_kwargs.update(_zip_args(zip_file, current_config, bool(code_kwargs)))
except IOError as e:
module.fail_json(msg=str(e), exception=traceback.format_exc())

code_kwargs.update(_s3_args(s3_bucket, s3_key, s3_object_version))

if not code_kwargs:
return {}

if not architectures and current_config.get('Architectures', None):
code_kwargs.update({'Architectures': current_config.get('Architectures', None)})

return code_kwargs


def main():
argument_spec = dict(
name=dict(required=True),
@@ -472,6 +546,7 @@ def main():
dead_letter_arn=dict(),
kms_key_arn=dict(type='str', no_log=False),
tracing_mode=dict(choices=['Active', 'PassThrough']),
architecture=dict(choices=['x86_64', 'arm64'], type='str', aliases=['architectures']),
tags=dict(type='dict', aliases=['resource_tags']),
purge_tags=dict(type='bool', default=True),
)
@@ -483,7 +558,11 @@ def main():
required_together = [['s3_key', 's3_bucket'],
['vpc_subnet_ids', 'vpc_security_group_ids']]

required_if = [['state', 'present', ['runtime', 'handler', 'role']]]
required_if = [
['state', 'present', ['runtime', 'handler', 'role']],
['architecture', 'x86_64', ['zip_file', 's3_bucket'], True],
['architecture', 'arm64', ['zip_file', 's3_bucket'], True],
]

module = AnsibleAWSModule(argument_spec=argument_spec,
supports_check_mode=True,
@@ -511,10 +590,15 @@ def main():
tags = module.params.get('tags')
purge_tags = module.params.get('purge_tags')
kms_key_arn = module.params.get('kms_key_arn')
architectures = module.params.get('architecture')

check_mode = module.check_mode
changed = False

if architectures:
module.require_botocore_at_least(
'1.21.51', reason='to configure the architectures that the function supports.')

try:
client = module.client('lambda', retry_decorator=AWSRetry.jittered_backoff())
except (ClientError, BotoCoreError) as e:
@@ -602,39 +686,17 @@ def main():
except (BotoCoreError, ClientError) as e:
module.fail_json_aws(e, msg="Trying to update lambda configuration")

# Update code configuration
code_kwargs = {'FunctionName': name, 'Publish': True}

# Update S3 location
if s3_bucket and s3_key:
# If function is stored on S3 always update
code_kwargs.update({'S3Bucket': s3_bucket, 'S3Key': s3_key})

# If S3 Object Version is given
if s3_object_version:
code_kwargs.update({'S3ObjectVersion': s3_object_version})

# Compare local checksum, update remote code when different
elif zip_file:
local_checksum = sha256sum(zip_file)
remote_checksum = current_config['CodeSha256']

# Only upload new code when local code is different compared to the remote code
if local_checksum != remote_checksum:
try:
with open(zip_file, 'rb') as f:
encoded_zip = f.read()
code_kwargs.update({'ZipFile': encoded_zip})
except IOError as e:
module.fail_json(msg=str(e), exception=traceback.format_exc())

# Tag Function
if tags is not None:
if set_tag(client, module, tags, current_function, purge_tags):
changed = True

# Upload new code if needed (e.g. code checksum has changed)
if len(code_kwargs) > 2:
code_kwargs = _code_args(module, current_config)
if code_kwargs:

# Update code configuration
code_kwargs.update({'FunctionName': name, 'Publish': True})

if not check_mode:
wait_for_lambda(client, module, name)

@@ -652,38 +714,26 @@ def main():
module.fail_json(msg='Unable to get function information after updating')
response = format_response(response)
# We're done
module.exit_json(changed=changed, **response)
module.exit_json(changed=changed, code_kwargs=code_kwargs, func_kwargs=func_kwargs, **response)

# Function doesn't exist, create new Lambda function
elif state == 'present':
if s3_bucket and s3_key:
# If function is stored on S3
code = {'S3Bucket': s3_bucket,
'S3Key': s3_key}
if s3_object_version:
code.update({'S3ObjectVersion': s3_object_version})
elif zip_file:
# If function is stored in local zipfile
try:
with open(zip_file, 'rb') as f:
zip_content = f.read()

code = {'ZipFile': zip_content}
except IOError as e:
module.fail_json(msg=str(e), exception=traceback.format_exc())

else:
module.fail_json(msg='Either S3 object or path to zipfile required')

func_kwargs = {'FunctionName': name,
'Publish': True,
'Runtime': runtime,
'Role': role_arn,
'Code': code,
'Timeout': timeout,
'MemorySize': memory_size,
}

code = _code_args(module, {})
if not code:
module.fail_json(msg='Either S3 object or path to zipfile required')
if 'Architectures' in code:
func_kwargs.update({'Architectures': code.pop('Architectures')})
func_kwargs.update({'Code': code})

if description is not None:
func_kwargs.update({'Description': description})

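The new required_if entries mean that setting I(architecture) always requires a code source, since changing the instruction set forces the deployment package to be re-uploaded. A hedged sketch of switching an existing function to arm64 from an S3-hosted package (bucket, key, and other values are placeholders):

- name: Move an existing function to arm64, re-supplying its code from S3
  community.aws.lambda:
    name: example-arm64-function                  # placeholder name
    state: present
    runtime: python3.9
    handler: lambda_function.lambda_handler
    role: arn:aws:iam::123456789012:role/example-lambda-role
    s3_bucket: example-deploy-bucket              # one of s3_bucket or zip_file must be given
    s3_key: lambda.zip                            # when architecture is set
    architecture: arm64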
16 changes: 10 additions & 6 deletions plugins/modules/lambda_info.py
@@ -15,8 +15,6 @@
- Gathers various details related to Lambda functions, including aliases, versions and event source mappings.
- Use module M(community.aws.lambda) to manage the lambda function itself, M(community.aws.lambda_alias) to manage function aliases,
M(community.aws.lambda_event) to manage lambda event source mappings, and M(community.aws.lambda_policy) to manage policy statements.


options:
query:
description:
@@ -34,11 +32,11 @@
description:
- When I(query=mappings), this is the Amazon Resource Name (ARN) of the Amazon Kinesis or DynamoDB stream.
type: str
author: Pierre Jodouin (@pjodouin)
author:
- Pierre Jodouin (@pjodouin)
extends_documentation_fragment:
- amazon.aws.aws
- amazon.aws.ec2

- amazon.aws.aws
- amazon.aws.ec2
'''

EXAMPLES = '''
@@ -94,6 +92,12 @@
returned: when C(query) is I(aliases) or I(all)
type: list
elements: str
architectures:
description: The architectures supported by the function.
returned: successful run where botocore >= 1.21.51
type: list
elements: str
sample: ['arm64']
code_sha256:
description: The SHA256 hash of the function's deployment package.
returned: success
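To read the new return value back, a lambda_info task along these lines should surface the architectures list when botocore >= 1.21.51 is in use; the register variable name is arbitrary and the default([]) guard is only a defensive assumption for older botocore versions.

- name: Gather configuration for all functions
  community.aws.lambda_info:
    query: config
  register: lambda_config_info

- name: Show the architectures reported for the first function
  ansible.builtin.debug:
    msg: "{{ lambda_config_info.functions[0].architectures | default([]) }}"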
5 changes: 4 additions & 1 deletion tests/integration/targets/lambda/meta/main.yml
@@ -1 +1,4 @@
dependencies: []
dependencies:
- role: setup_botocore_pip
vars:
botocore_version: "1.21.51"
10 changes: 10 additions & 0 deletions tests/integration/targets/lambda/tasks/main.yml
@@ -110,6 +110,9 @@
handler: '{{ lambda_python_handler }}'
role: '{{ lambda_role_name }}'
zip_file: '{{ zip_res.dest }}'
architecture: arm64
vars:
ansible_python_interpreter: "{{ botocore_virtualenv_interpreter }}"
register: result
check_mode: yes
- name: assert lambda upload succeeded
@@ -124,12 +127,16 @@
handler: '{{ lambda_python_handler }}'
role: '{{ lambda_role_name }}'
zip_file: '{{ zip_res.dest }}'
architecture: arm64
vars:
ansible_python_interpreter: "{{ botocore_virtualenv_interpreter }}"
register: result
- name: assert lambda upload succeeded
assert:
that:
- result.changed
- result.configuration.tracing_config.mode == "PassThrough"
- result.configuration.architectures == ['arm64']

- include_tasks: 'tagging.yml'

@@ -260,6 +267,8 @@
query: all
register: lambda_infos_all
check_mode: yes
vars:
ansible_python_interpreter: "{{ botocore_virtualenv_interpreter }}"
- name: lambda_info | Assert successful retrieval of all information
assert:
that:
@@ -275,6 +284,7 @@
- lambda_infos_all.functions[0].policy is defined
- lambda_infos_all.functions[0].mappings is defined
- lambda_infos_all.functions[0].tags is defined
- lambda_infos_all.functions[0].architectures == ['arm64']

- name: lambda_info | Ensure default query value is 'config' when function name omitted
lambda_info: