Skip to content

Commit

Permalink
s3_logging - migrate to boto3 (ansible-collections#447)
Browse files Browse the repository at this point in the history
* Migrate s3_logging to boto3
* Re-enable s3_logging tests
* Add support for check_mode
* Catch and retry on "InvalidTargetBucketForLogging" - ACL updates occasionally take time to propagate
* changelog
  • Loading branch information
tremble authored Mar 27, 2021
1 parent 9c0967c commit a38682a
Show file tree
Hide file tree
Showing 5 changed files with 219 additions and 81 deletions.
3 changes: 3 additions & 0 deletions changelogs/fragments/447-s3_logging-boto3.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
minor_changes:
- s3_logging - migrated from boto to boto3 (https://github.com/ansible-collections/community.aws/pull/447).
- s3_logging - added support for check_mode (https://github.com/ansible-collections/community.aws/pull/447).
158 changes: 100 additions & 58 deletions plugins/modules/s3_logging.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,27 +59,68 @@
'''

try:
import boto.ec2
from boto.s3.connection import OrdinaryCallingFormat, Location
from boto.exception import S3ResponseError
import botocore
except ImportError:
pass # Handled by HAS_BOTO
pass # Handled by AnsibleAWSModule

from ansible_collections.amazon.aws.plugins.module_utils.ec2 import camel_dict_to_snake_dict

from ansible.module_utils._text import to_native
from ansible_collections.amazon.aws.plugins.module_utils.core import AnsibleAWSModule
from ansible_collections.amazon.aws.plugins.module_utils.ec2 import AnsibleAWSError
from ansible_collections.amazon.aws.plugins.module_utils.ec2 import get_aws_connection_info
from ansible_collections.amazon.aws.plugins.module_utils.ec2 import HAS_BOTO
from ansible_collections.amazon.aws.plugins.module_utils.core import is_boto3_error_code
from ansible_collections.amazon.aws.plugins.module_utils.ec2 import AWSRetry


def compare_bucket_logging(bucket_logging, target_bucket, target_prefix):
    """Return True when the bucket's logging config differs from the desired state.

    :param bucket_logging: dict as returned by S3 ``get_bucket_logging``
        (may or may not contain a ``LoggingEnabled`` key).
    :param target_bucket: desired logging target bucket name, or None to
        indicate logging should be disabled.
    :param target_prefix: desired key prefix for delivered log objects.
    :return: True if an update is required, False if already in sync.
    """
    if not bucket_logging.get('LoggingEnabled', False):
        # Logging currently disabled: a change is only needed if the caller
        # wants it enabled (a truthy target bucket).
        if target_bucket:
            return True
        return False

    logging = bucket_logging['LoggingEnabled']
    if logging['TargetBucket'] != target_bucket:
        return True
    if logging['TargetPrefix'] != target_prefix:
        return True
    return False


def verify_acls(connection, module, target_bucket):
    """Ensure the S3 LogDelivery group has FULL_CONTROL on the target bucket.

    Before S3 will deliver access logs to a bucket, the log-delivery group
    must be granted permission on that bucket's ACL.

    :param connection: boto3 S3 client (wrapped with an AWSRetry decorator,
        hence the ``aws_retry=True`` kwargs).
    :param module: AnsibleAWSModule instance, used for failure reporting and
        check_mode.
    :param target_bucket: name of the bucket that will receive the logs.
    :return: True if the ACL was (or, in check mode, would be) changed,
        False if the required grant was already present.
    """
    try:
        current_acl = connection.get_bucket_acl(aws_retry=True, Bucket=target_bucket)
        current_grants = current_acl['Grants']
    except is_boto3_error_code('NoSuchBucket'):
        module.fail_json(msg="Target Bucket '{0}' not found".format(target_bucket))
    except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e:  # pylint: disable=duplicate-except
        module.fail_json_aws(e, msg="Failed to fetch target bucket ACL")

    required_grant = {
        'Grantee': {
            'URI': "http://acs.amazonaws.com/groups/s3/LogDelivery",
            'Type': 'Group'
        },
        'Permission': 'FULL_CONTROL'
    }

    # Already granted: nothing to do.
    for grant in current_grants:
        if grant == required_grant:
            return False

    if module.check_mode:
        return True

    # put_bucket_acl replaces the whole policy, so resend the existing
    # grants plus the log-delivery grant.  ResponseMetadata is part of the
    # API response, not the policy, and must be stripped before resubmitting.
    updated_acl = dict(current_acl)
    updated_grants = list(current_grants)
    updated_grants.append(required_grant)
    updated_acl['Grants'] = updated_grants
    del updated_acl['ResponseMetadata']
    try:
        connection.put_bucket_acl(aws_retry=True, Bucket=target_bucket, AccessControlPolicy=updated_acl)
    except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e:
        module.fail_json_aws(e, msg="Failed to update target bucket ACL to allow log delivery")

    return True


def enable_bucket_logging(connection, module):

Expand All @@ -89,29 +130,37 @@ def enable_bucket_logging(connection, module):
changed = False

try:
bucket = connection.get_bucket(bucket_name)
except S3ResponseError as e:
module.fail_json(msg=to_native(e))
bucket_logging = connection.get_bucket_logging(aws_retry=True, Bucket=bucket_name)
except is_boto3_error_code('NoSuchBucket'):
module.fail_json(msg="Bucket '{0}' not found".format(bucket_name))
except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e: # pylint: disable=duplicate-except
module.fail_json_aws(e, msg="Failed to fetch current logging status")

try:
if not compare_bucket_logging(bucket, target_bucket, target_prefix):
# Before we can enable logging we must give the log-delivery group WRITE and READ_ACP permissions to the target bucket
try:
target_bucket_obj = connection.get_bucket(target_bucket)
except S3ResponseError as e:
if e.status == 301:
module.fail_json(msg="the logging target bucket must be in the same region as the bucket being logged")
else:
module.fail_json(msg=to_native(e))
target_bucket_obj.set_as_logging_target()
changed |= verify_acls(connection, module, target_bucket)

bucket.enable_logging(target_bucket, target_prefix)
changed = True
if not compare_bucket_logging(bucket_logging, target_bucket, target_prefix):
bucket_logging = camel_dict_to_snake_dict(bucket_logging)
module.exit_json(changed=changed, **bucket_logging)

except S3ResponseError as e:
module.fail_json(msg=to_native(e))
if module.check_mode:
module.exit_json(changed=True)

module.exit_json(changed=changed)
result = connection.put_bucket_logging(
aws_retry=True,
Bucket=bucket_name,
BucketLoggingStatus={
'LoggingEnabled': {
'TargetBucket': target_bucket,
'TargetPrefix': target_prefix,
}
})

except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e:
module.fail_json_aws(e, msg="Failed to enable bucket logging")

result = camel_dict_to_snake_dict(result)
module.exit_json(changed=True, **result)


def disable_bucket_logging(connection, module):
Expand All @@ -120,14 +169,26 @@ def disable_bucket_logging(connection, module):
changed = False

try:
bucket = connection.get_bucket(bucket_name)
if not compare_bucket_logging(bucket, None, None):
bucket.disable_logging()
changed = True
except S3ResponseError as e:
module.fail_json(msg=to_native(e))
bucket_logging = connection.get_bucket_logging(aws_retry=True, Bucket=bucket_name)
except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e:
module.fail_json_aws(e, msg="Failed to fetch current logging status")

if not compare_bucket_logging(bucket_logging, None, None):
module.exit_json(changed=False)

if module.check_mode:
module.exit_json(changed=True)

try:
response = AWSRetry.jittered_backoff(
catch_extra_error_codes=['InvalidTargetBucketForLogging']
)(connection.put_bucket_logging)(
Bucket=bucket_name, BucketLoggingStatus={}
)
except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e:
module.fail_json_aws(e, msg="Failed to disable bucket logging")

module.exit_json(changed=changed)
module.exit_json(changed=True)


def main():
Expand All @@ -139,28 +200,9 @@ def main():
state=dict(required=False, default='present', choices=['present', 'absent']),
)

module = AnsibleAWSModule(argument_spec=argument_spec)

if not HAS_BOTO:
module.fail_json(msg='boto required for this module')

region, ec2_url, aws_connect_params = get_aws_connection_info(module)

if region in ('us-east-1', '', None):
# S3ism for the US Standard region
location = Location.DEFAULT
else:
# Boto uses symbolic names for locations but region strings will
# actually work fine for everything except us-east-1 (US Standard)
location = region
try:
connection = boto.s3.connect_to_region(location, is_secure=True, calling_format=OrdinaryCallingFormat(), **aws_connect_params)
# use this as fallback because connect_to_region seems to fail in boto + non 'classic' aws accounts in some cases
if connection is None:
connection = boto.connect_s3(**aws_connect_params)
except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
module.fail_json(msg=str(e))
module = AnsibleAWSModule(argument_spec=argument_spec, supports_check_mode=True)

connection = module.client('s3', retry_decorator=AWSRetry.jittered_backoff())
state = module.params.get("state")

if state == 'present':
Expand Down
5 changes: 1 addition & 4 deletions tests/integration/targets/s3_logging/aliases
Original file line number Diff line number Diff line change
@@ -1,5 +1,2 @@
# reason: unstable
# https://github.com/ansible/ansible/issues/63520
unsupported

cloud/aws
shippable/aws/group3
2 changes: 1 addition & 1 deletion tests/integration/targets/s3_logging/defaults/main.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
---
test_bucket: '{{ resource_prefix }}-testbucket'
test_bucket: '{{ resource_prefix }}-s3-logging'
log_bucket_1: '{{ resource_prefix }}-logs-1'
log_bucket_2: '{{ resource_prefix }}-logs-2'
Loading

0 comments on commit a38682a

Please sign in to comment.