diff --git a/changelogs/fragments/921-s3_object-handle-file-upload-to-acl-disabled-bucket.yml b/changelogs/fragments/921-s3_object-handle-file-upload-to-acl-disabled-bucket.yml new file mode 100644 index 00000000000..68c0d35e166 --- /dev/null +++ b/changelogs/fragments/921-s3_object-handle-file-upload-to-acl-disabled-bucket.yml @@ -0,0 +1,2 @@ +minor_changes: +- s3_object - updated module to add support for handling file upload to a bucket with ACL disabled (https://github.com/ansible-collections/amazon.aws/pull/921). diff --git a/plugins/modules/s3_object.py b/plugins/modules/s3_object.py index 8b15a1b6dd0..b6ba7033ede 100644 --- a/plugins/modules/s3_object.py +++ b/plugins/modules/s3_object.py @@ -634,7 +634,7 @@ def option_in_extra_args(option): return allowed_extra_args[temp_option] -def upload_s3file(module, s3, bucket, obj, expiry, metadata, encrypt, headers, src=None, content=None): +def upload_s3file(module, s3, bucket, obj, expiry, metadata, encrypt, headers, src=None, content=None, acl_disabled=False): if module.check_mode: module.exit_json(msg="PUT operation skipped - running in check mode", changed=True) try: @@ -677,13 +677,14 @@ def upload_s3file(module, s3, bucket, obj, expiry, metadata, encrypt, headers, s s3.upload_fileobj(Fileobj=f, Bucket=bucket, Key=obj, ExtraArgs=extra) except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e: module.fail_json_aws(e, msg="Unable to complete PUT operation.") - try: - for acl in module.params.get('permission'): - s3.put_object_acl(ACL=acl, Bucket=bucket, Key=obj) - except is_boto3_error_code(IGNORE_S3_DROP_IN_EXCEPTIONS): - module.warn("PutObjectAcl is not implemented by your storage provider. 
Set the permission parameters to the empty list to avoid this warning") - except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e: # pylint: disable=duplicate-except - module.fail_json_aws(e, msg="Unable to set object ACL") + if not acl_disabled: + try: + for acl in module.params.get('permission'): + s3.put_object_acl(ACL=acl, Bucket=bucket, Key=obj) + except is_boto3_error_code(IGNORE_S3_DROP_IN_EXCEPTIONS): + module.warn("PutObjectAcl is not implemented by your storage provider. Set the permission parameters to the empty list to avoid this warning") + except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e: # pylint: disable=duplicate-except + module.fail_json_aws(e, msg="Unable to set object ACL") # Tags tags, changed = ensure_tags(s3, module, bucket, obj) @@ -1056,12 +1057,25 @@ def main(): validate = not ignore_nonexistent_bucket + # check if bucket exists, if yes, check if ACL is disabled + acl_disabled = False + exists = bucket_check(module, s3, bucket) + if exists: + try: + object_ownership = s3.get_bucket_ownership_controls(Bucket=bucket)['OwnershipControls']['Rules'][0]['ObjectOwnership'] + if object_ownership == 'BucketOwnerEnforced': + acl_disabled = True + # if bucket ownership controls are not found + except botocore.exceptions.ClientError: + pass + # separate types of ACLs - bucket_acl = [acl for acl in module.params.get('permission') if acl in bucket_canned_acl] - object_acl = [acl for acl in module.params.get('permission') if acl in object_canned_acl] - error_acl = [acl for acl in module.params.get('permission') if acl not in bucket_canned_acl and acl not in object_canned_acl] - if error_acl: - module.fail_json(msg='Unknown permission specified: %s' % error_acl) + if not acl_disabled: + bucket_acl = [acl for acl in module.params.get('permission') if acl in bucket_canned_acl] + object_acl = [acl for acl in module.params.get('permission') if acl in object_canned_acl] + error_acl = [acl for acl in 
module.params.get('permission') if acl not in bucket_canned_acl and acl not in object_canned_acl] + if error_acl: + module.fail_json(msg='Unknown permission specified: %s' % error_acl) # First, we check to see if the bucket exists, we get "bucket" returned. bucketrtn = bucket_check(module, s3, bucket, validate=validate) @@ -1124,8 +1138,9 @@ def main(): get_download_url(module, s3, bucket, obj, expiry, tags, changed=tags_update) # only use valid object acls for the upload_s3file function - module.params['permission'] = object_acl - upload_s3file(module, s3, bucket, obj, expiry, metadata, encrypt, headers, src=src, content=bincontent) + if not acl_disabled: + module.params['permission'] = object_acl + upload_s3file(module, s3, bucket, obj, expiry, metadata, encrypt, headers, src=src, content=bincontent, acl_disabled=acl_disabled) # Delete an object from a bucket, not the entire bucket if mode == 'delobj': diff --git a/tests/integration/targets/s3_object/meta/main.yml b/tests/integration/targets/s3_object/meta/main.yml index e69de29bb2d..1810d4bec98 100644 --- a/tests/integration/targets/s3_object/meta/main.yml +++ b/tests/integration/targets/s3_object/meta/main.yml @@ -0,0 +1,2 @@ +dependencies: + - setup_remote_tmp_dir diff --git a/tests/integration/targets/s3_object/tasks/copy_object_acl_disabled_bucket.yml b/tests/integration/targets/s3_object/tasks/copy_object_acl_disabled_bucket.yml new file mode 100644 index 00000000000..7fbd8b786bb --- /dev/null +++ b/tests/integration/targets/s3_object/tasks/copy_object_acl_disabled_bucket.yml @@ -0,0 +1,111 @@ +- name: test copying objects to bucket with ACL disabled + block: + - name: Create a bucket with ACL disabled for the test + s3_bucket: + name: "{{ bucket_name }}-acl-disabled" + object_ownership: BucketOwnerEnforced + state: present + register: create_result + + - name: Ensure bucket creation + assert: + that: + - create_result is changed + - create_result is not failed + - create_result.object_ownership == 
"BucketOwnerEnforced" + + - name: Create content + set_fact: + content: "{{ lookup('password', '/dev/null chars=ascii_letters,digits,hexdigits,punctuation') }}" + + - name: Create local acl_disabled_upload_test.txt + copy: + content: "{{ content }}" + dest: "{{ remote_tmp_dir }}/acl_disabled_upload_test.txt" + + - name: Upload a file to the bucket (check_mode) + amazon.aws.s3_object: + bucket: "{{ bucket_name }}-acl-disabled" + src: "{{ remote_tmp_dir }}/acl_disabled_upload_test.txt" + object: "acl_disabled_upload_test.txt" + mode: put + check_mode: true + register: upload_file_result + + - assert: + that: + - upload_file_result is changed + - upload_file_result is not failed + - upload_file_result.msg == "PUT operation skipped - running in check mode" + - '"s3:PutObject" not in upload_file_result.resource_actions' + + - name: Upload a file to the bucket + amazon.aws.s3_object: + bucket: "{{ bucket_name }}-acl-disabled" + src: "{{ remote_tmp_dir }}/acl_disabled_upload_test.txt" + object: "acl_disabled_upload_test.txt" + mode: put + register: upload_file_result + + - assert: + that: + - upload_file_result is changed + - upload_file_result is not failed + - upload_file_result.msg == "PUT operation complete" + - '"s3:PutObject" in upload_file_result.resource_actions' + + - name: Upload a file to the bucket (check_mode - idempotency) + amazon.aws.s3_object: + bucket: "{{ bucket_name }}-acl-disabled" + src: "{{ remote_tmp_dir }}/acl_disabled_upload_test.txt" + object: "acl_disabled_upload_test.txt" + mode: put + check_mode: true + register: upload_file_result + + - assert: + that: + - upload_file_result is not changed + - upload_file_result is not failed + - upload_file_result.msg != "PUT operation complete" + - '"s3:PutObject" not in upload_file_result.resource_actions' + + - name: Upload a file to the bucket (idempotency) + amazon.aws.s3_object: + bucket: "{{ bucket_name }}-acl-disabled" + src: "{{ remote_tmp_dir }}/acl_disabled_upload_test.txt" + object: 
"acl_disabled_upload_test.txt" + mode: put + register: upload_file_result + + - assert: + that: + - upload_file_result is not changed + - upload_file_result is not failed + - upload_file_result.msg != "PUT operation complete" + - '"s3:PutObject" not in upload_file_result.resource_actions' + + always: + + - name: Delete the file in the bucket + amazon.aws.s3_object: + bucket: "{{ bucket_name }}-acl-disabled" + src: "{{ remote_tmp_dir }}/acl_disabled_upload_test.txt" + object: "acl_disabled_upload_test.txt" + mode: delobj + retries: 3 + delay: 3 + ignore_errors: true + + - name: Delete bucket created in this test + s3_bucket: + name: "{{ bucket_name }}-acl-disabled" + object_ownership: BucketOwnerEnforced + state: absent + register: delete_result + + - name: Ensure bucket deletion + assert: + that: + - delete_result is changed + - delete_result is not failed diff --git a/tests/integration/targets/s3_object/tasks/main.yml b/tests/integration/targets/s3_object/tasks/main.yml index 05c110af6a3..9770a555eb6 100644 --- a/tests/integration/targets/s3_object/tasks/main.yml +++ b/tests/integration/targets/s3_object/tasks/main.yml @@ -16,11 +16,10 @@ set_fact: aws_account: "{{ aws_caller_info.account }}" - - name: Create temporary directory - tempfile: - state: directory - path: /var/tmp - register: tmpdir + - name: check that temp directory was made + assert: + that: + - remote_tmp_dir is defined - name: Create content set_fact: @@ -73,11 +72,11 @@ - name: Create local upload.txt copy: content: "{{ content }}" - dest: "{{ tmpdir.path }}/upload.txt" + dest: "{{ remote_tmp_dir }}/upload.txt" - name: stat the file stat: - path: "{{ tmpdir.path }}/upload.txt" + path: "{{ remote_tmp_dir }}/upload.txt" get_checksum: yes register: upload_file @@ -85,7 +84,7 @@ s3_object: bucket: "{{ bucket_name }}" mode: put - src: "{{ tmpdir.path }}/upload.txt" + src: "{{ remote_tmp_dir }}/upload.txt" object: delete.txt retries: 3 delay: 3 @@ -100,7 +99,7 @@ s3_object: bucket: "{{ bucket_name }}" 
mode: put - src: "{{ tmpdir.path }}/upload.txt" + src: "{{ remote_tmp_dir }}/upload.txt" object: delete.txt register: test_async async: 30 @@ -117,7 +116,7 @@ s3_object: bucket: "{{ bucket_name }}" mode: put - src: "{{ tmpdir.path }}/upload.txt" + src: "{{ remote_tmp_dir }}/upload.txt" object: delete.txt retries: 3 delay: 3 @@ -165,7 +164,7 @@ s3_object: bucket: "{{ bucket_name }}" mode: put - src: "{{ tmpdir.path }}/upload.txt" + src: "{{ remote_tmp_dir }}/upload.txt" object: delete.txt overwrite: never retries: 3 @@ -180,7 +179,7 @@ s3_object: bucket: "{{ bucket_name }}" mode: put - src: "{{ tmpdir.path }}/upload.txt" + src: "{{ remote_tmp_dir }}/upload.txt" object: delete.txt overwrite: different retries: 3 @@ -195,7 +194,7 @@ s3_object: bucket: "{{ bucket_name }}" mode: put - src: "{{ tmpdir.path }}/upload.txt" + src: "{{ remote_tmp_dir }}/upload.txt" object: delete.txt overwrite: always retries: 3 @@ -210,7 +209,7 @@ s3_object: bucket: "{{ bucket_name }}" mode: get - dest: "{{ tmpdir.path }}/download.txt" + dest: "{{ remote_tmp_dir }}/download.txt" object: delete.txt retries: 3 delay: 3 @@ -219,7 +218,7 @@ - name: stat the file so we can compare the checksums stat: - path: "{{ tmpdir.path }}/download.txt" + path: "{{ remote_tmp_dir }}/download.txt" get_checksum: yes register: download_file @@ -231,7 +230,7 @@ s3_object: bucket: "{{ bucket_name }}" mode: get - dest: "{{ tmpdir.path }}/download.txt" + dest: "{{ remote_tmp_dir }}/download.txt" object: delete.txt retries: 3 delay: 3 @@ -243,14 +242,14 @@ - name: modify destination copy: - dest: "{{ tmpdir.path }}/download.txt" + dest: "{{ remote_tmp_dir }}/download.txt" src: hello.txt - name: test get with overwrite=never s3_object: bucket: "{{ bucket_name }}" mode: get - dest: "{{ tmpdir.path }}/download.txt" + dest: "{{ remote_tmp_dir }}/download.txt" object: delete.txt overwrite: never retries: 3 @@ -265,7 +264,7 @@ s3_object: bucket: "{{ bucket_name }}" mode: get - dest: "{{ tmpdir.path }}/download.txt" + 
dest: "{{ remote_tmp_dir }}/download.txt" object: delete.txt retries: 3 delay: 3 @@ -279,7 +278,7 @@ s3_object: bucket: "{{ bucket_name }}" mode: get - dest: "{{ tmpdir.path }}/download.txt" + dest: "{{ remote_tmp_dir }}/download.txt" object: delete.txt overwrite: always retries: 3 @@ -294,7 +293,7 @@ s3_object: bucket: "{{ bucket_name }}" mode: get - dest: "{{ tmpdir.path }}/download.txt" + dest: "{{ remote_tmp_dir }}/download.txt" object: delete.txt overwrite: latest retries: 3 @@ -306,13 +305,13 @@ - result is not changed - name: modify mtime for local file to past - shell: touch -mt 197001010900.00 "{{ tmpdir.path }}/download.txt" + shell: touch -mt 197001010900.00 "{{ remote_tmp_dir }}/download.txt" - name: test get with overwrite=latest and files that mtimes are different s3_object: bucket: "{{ bucket_name }}" mode: get - dest: "{{ tmpdir.path }}/download.txt" + dest: "{{ remote_tmp_dir }}/download.txt" object: delete.txt overwrite: latest retries: 3 @@ -383,7 +382,7 @@ s3_object: bucket: "{{ bucket_name }}" mode: put - src: "{{ tmpdir.path }}/upload.txt" + src: "{{ remote_tmp_dir }}/upload.txt" encrypt: yes object: delete_encrypt.txt retries: 3 @@ -399,7 +398,7 @@ s3_object: bucket: "{{ bucket_name }}" mode: get - dest: "{{ tmpdir.path }}/download_encrypted.txt" + dest: "{{ remote_tmp_dir }}/download_encrypted.txt" object: delete_encrypt.txt retries: 3 delay: 3 @@ -408,7 +407,7 @@ - name: stat the file so we can compare the checksums stat: - path: "{{ tmpdir.path }}/download_encrypted.txt" + path: "{{ remote_tmp_dir }}/download_encrypted.txt" get_checksum: yes register: download_file @@ -428,7 +427,7 @@ s3_object: bucket: "{{ bucket_name }}" mode: put - src: "{{ tmpdir.path }}/upload.txt" + src: "{{ remote_tmp_dir }}/upload.txt" encrypt: yes encryption_mode: aws:kms object: delete_encrypt_kms.txt @@ -445,7 +444,7 @@ s3_object: bucket: "{{ bucket_name }}" mode: get - dest: "{{ tmpdir.path }}/download_kms.txt" + dest: "{{ remote_tmp_dir }}/download_kms.txt" 
object: delete_encrypt_kms.txt retries: 3 delay: 3 @@ -454,7 +453,7 @@ - name: get the stat of the file so we can compare the checksums stat: - path: "{{ tmpdir.path }}/download_kms.txt" + path: "{{ remote_tmp_dir }}/download_kms.txt" get_checksum: yes register: download_file @@ -543,12 +542,12 @@ - name: make tempfile 4 GB for OSX command: - _raw_params: "dd if=/dev/zero of={{ tmpdir.path }}/largefile bs=1m count=4096" + _raw_params: "dd if=/dev/zero of={{ remote_tmp_dir }}/largefile bs=1m count=4096" when: ansible_distribution == 'MacOSX' - name: make tempfile 4 GB for linux command: - _raw_params: "dd if=/dev/zero of={{ tmpdir.path }}/largefile bs=1M count=4096" + _raw_params: "dd if=/dev/zero of={{ remote_tmp_dir }}/largefile bs=1M count=4096" when: ansible_system == 'Linux' - name: test multipart download - platform specific @@ -562,14 +561,14 @@ s3_object: bucket: "{{ bucket_name }}" mode: put - src: "{{ tmpdir.path }}/largefile" + src: "{{ remote_tmp_dir }}/largefile" object: multipart.txt - name: download file once s3_object: bucket: "{{ bucket_name }}" mode: get - dest: "{{ tmpdir.path }}/download.txt" + dest: "{{ remote_tmp_dir }}/download.txt" object: multipart.txt overwrite: different retries: 3 @@ -585,7 +584,7 @@ s3_object: bucket: "{{ bucket_name }}" mode: get - dest: "{{ tmpdir.path }}/download.txt" + dest: "{{ remote_tmp_dir }}/download.txt" object: multipart.txt overwrite: different register: result @@ -626,7 +625,7 @@ s3_object: bucket: "{{ bucket_name_acl }}" mode: put - src: "{{ tmpdir.path }}/upload.txt" + src: "{{ remote_tmp_dir }}/upload.txt" object: file-with-permissions.txt permission: bucket-owner-full-control ignore_nonexistent_bucket: True @@ -744,7 +743,7 @@ s3_object: bucket: "{{ bucket_name }}" mode: get - dest: "{{ tmpdir.path }}/download_binary.bin" + dest: "{{ remote_tmp_dir }}/download_binary.bin" object: put-binary.bin register: result @@ -754,7 +753,7 @@ get_checksum: yes loop: - "{{ role_path }}/files/test.png" - - "{{ 
tmpdir.path }}/download_binary.bin" + - "{{ remote_tmp_dir }}/download_binary.bin" register: binary_files - assert: @@ -763,6 +762,8 @@ - include_tasks: copy_object.yml + - include_tasks: copy_object_acl_disabled_bucket.yml + # ============================================================ - name: 'Run tagging tests' block: @@ -1003,7 +1004,7 @@ - name: delete temporary files file: state: absent - path: "{{ tmpdir.path }}" + path: "{{ remote_tmp_dir }}" ignore_errors: true - include_tasks: delete_bucket.yml