diff --git a/changelogs/fragments/2107-s3_download.yml b/changelogs/fragments/2107-s3_download.yml new file mode 100644 index 00000000000..b4604192745 --- /dev/null +++ b/changelogs/fragments/2107-s3_download.yml @@ -0,0 +1,2 @@ +bugfixes: + - s3_object - fixed an issue that caused ``MemoryError`` exceptions when downloading large files (https://github.com/ansible-collections/amazon.aws/issues/2107). diff --git a/plugins/modules/s3_object.py b/plugins/modules/s3_object.py index 0486d3b9f81..c3e45004a37 100644 --- a/plugins/modules/s3_object.py +++ b/plugins/modules/s3_object.py @@ -783,9 +783,6 @@ def upload_s3file( def download_s3file(module, s3, bucket, obj, dest, retries, version=None): if module.check_mode: module.exit_json(msg="GET operation skipped - running in check mode", changed=True) - # retries is the number of loops; range/xrange needs to be one - # more to get that count of loops. - _get_object_content(module, s3, bucket, obj, version) optional_kwargs = {"ExtraArgs": {"VersionId": version}} if version else {} for x in range(0, retries + 1):