Skip to content

Commit

Permalink
Minor sanity test fixes. (#1410)
Browse files Browse the repository at this point in the history
Minor sanity test fixes (new devel)

SUMMARY
ansible-devel has added a new PEP 8 test (missing whitespace after keyword); this adds the fixes before the devel sanity tests become 'voting'.
Additionally fixes:

unused variables
broad catching of Exception

ISSUE TYPE

Bugfix Pull Request

COMPONENT NAME
plugins/modules/autoscaling_group_info.py
plugins/modules/cloudfront_distribution.py
plugins/modules/cloudfront_origin_access_identity.py
plugins/modules/cloudtrail.py
plugins/modules/ec2_vpc_nacl.py
plugins/modules/eks_fargate_profile.py
plugins/modules/redshift.py
plugins/modules/s3_bucket_info.py
ADDITIONAL INFORMATION
cloudfront_distribution still has a lot of broad `except Exception` handling, but it is part of parameter validation, which should be overhauled separately; unfortunately, the tests for it are rather broken.

Reviewed-by: Alina Buzachis <None>
  • Loading branch information
tremble authored Aug 15, 2022
1 parent 9660de0 commit 3d4736b
Show file tree
Hide file tree
Showing 12 changed files with 46 additions and 54 deletions.
9 changes: 9 additions & 0 deletions changelogs/fragments/1410-linting.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
minor_changes:
- autoscaling_group_info - minor sanity test fixes (https://github.com/ansible-collections/community.aws/pull/1410).
- cloudfront_distribution - minor sanity test fixes (https://github.com/ansible-collections/community.aws/pull/1410).
- cloudfront_origin_access_identity - minor sanity test fixes (https://github.com/ansible-collections/community.aws/pull/1410).
- cloudtrail - minor sanity test fixes (https://github.com/ansible-collections/community.aws/pull/1410).
- ec2_vpc_nacl - minor sanity test fixes (https://github.com/ansible-collections/community.aws/pull/1410).
- eks_fargate_profile - minor sanity test fixes (https://github.com/ansible-collections/community.aws/pull/1410).
- redshift - minor sanity test fixes (https://github.com/ansible-collections/community.aws/pull/1410).
- s3_bucket_info - minor sanity test fixes (https://github.com/ansible-collections/community.aws/pull/1410).
9 changes: 4 additions & 5 deletions plugins/connection/aws_ssm.py
Original file line number Diff line number Diff line change
Expand Up @@ -204,12 +204,11 @@
try:
import boto3
from botocore.client import Config
HAS_BOTO_3 = True
except ImportError as e:
HAS_BOTO_3_ERROR = str(e)
HAS_BOTO_3 = False
pass

from functools import wraps
from ansible_collections.amazon.aws.plugins.module_utils.botocore import HAS_BOTO3
from ansible.errors import AnsibleConnectionFailure, AnsibleError, AnsibleFileNotFound
from ansible.module_utils.basic import missing_required_lib
from ansible.module_utils.six.moves import xrange
Expand Down Expand Up @@ -290,8 +289,8 @@ class Connection(ConnectionBase):
MARK_LENGTH = 26

def __init__(self, *args, **kwargs):
if not HAS_BOTO_3:
raise AnsibleError('{0}: {1}'.format(missing_required_lib("boto3"), HAS_BOTO_3_ERROR))
if not HAS_BOTO3:
raise AnsibleError('{0}'.format(missing_required_lib("boto3")))

super(Connection, self).__init__(*args, **kwargs)
self.host = self._play_context.remote_addr
Expand Down
4 changes: 2 additions & 2 deletions plugins/modules/autoscaling_group_info.py
Original file line number Diff line number Diff line change
Expand Up @@ -381,7 +381,7 @@ def find_asgs(conn, module, name=None, tags=None):

try:
elbv2 = module.client('elbv2')
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError):
# This is nice to have, not essential
elbv2 = None
matched_asgs = []
Expand Down Expand Up @@ -409,7 +409,7 @@ def find_asgs(conn, module, name=None, tags=None):
# workaround for https://github.com/ansible/ansible/pull/25015
if 'target_group_ar_ns' in asg:
asg['target_group_arns'] = asg['target_group_ar_ns']
del(asg['target_group_ar_ns'])
del asg['target_group_ar_ns']
if asg.get('target_group_arns'):
if elbv2:
try:
Expand Down
12 changes: 6 additions & 6 deletions plugins/modules/cloudfront_distribution.py
Original file line number Diff line number Diff line change
Expand Up @@ -1676,7 +1676,7 @@ def validate_origins(self, client, config, origins, default_origin_domain_name,
if purge_origins:
for domain in list(all_origins.keys()):
if domain not in new_domains:
del(all_origins[domain])
del all_origins[domain]
return ansible_list_to_cloudfront_list(list(all_origins.values()))
except Exception as e:
self.module.fail_json_aws(e, msg="Error validating distribution origins")
Expand All @@ -1694,7 +1694,7 @@ def validate_s3_origin_configuration(self, client, existing_config, origin):
cfoai_config = dict(CloudFrontOriginAccessIdentityConfig=dict(CallerReference=caller_reference,
Comment=comment))
oai = client.create_cloud_front_origin_access_identity(**cfoai_config)['CloudFrontOriginAccessIdentity']['Id']
except Exception as e:
except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e:
self.module.fail_json_aws(e, msg="Couldn't create Origin Access Identity for id %s" % origin['id'])
return "origin-access-identity/cloudfront/%s" % oai

Expand All @@ -1717,7 +1717,7 @@ def validate_origin(self, client, existing_config, origin, default_origin_path):
else:
s3_origin_config = None

del(origin["s3_origin_access_identity_enabled"])
del origin["s3_origin_access_identity_enabled"]

if s3_origin_config:
oai = s3_origin_config
Expand Down Expand Up @@ -1766,7 +1766,7 @@ def validate_cache_behaviors(self, config, cache_behaviors, valid_origins, purge
all_cache_behaviors[cache_behavior['path_pattern']] = valid_cache_behavior
if purge_cache_behaviors:
for target_origin_id in set(all_cache_behaviors.keys()) - set([cb['path_pattern'] for cb in cache_behaviors]):
del(all_cache_behaviors[target_origin_id])
del all_cache_behaviors[target_origin_id]
return ansible_list_to_cloudfront_list(list(all_cache_behaviors.values()))
except Exception as e:
self.module.fail_json_aws(e, msg="Error validating distribution cache behaviors")
Expand Down Expand Up @@ -1954,7 +1954,7 @@ def validate_custom_error_responses(self, config, custom_error_responses, purge_
if 'response_code' in custom_error_response:
custom_error_response['response_code'] = str(custom_error_response['response_code'])
if custom_error_response['error_code'] in existing_responses:
del(existing_responses[custom_error_response['error_code']])
del existing_responses[custom_error_response['error_code']]
result.append(custom_error_response)
if not purge_custom_error_responses:
result.extend(existing_responses.values())
Expand Down Expand Up @@ -2261,7 +2261,7 @@ def main():

if 'distribution_config' in result:
result.update(result['distribution_config'])
del(result['distribution_config'])
del result['distribution_config']

module.exit_json(changed=changed, **result)

Expand Down
3 changes: 1 addition & 2 deletions plugins/modules/cloudfront_origin_access_identity.py
Original file line number Diff line number Diff line change
Expand Up @@ -256,8 +256,7 @@ def main():
else:
result = service_mgr.create_origin_access_identity(caller_reference, comment)
changed = True
elif(state == 'absent' and origin_access_identity_id is not None and
e_tag is not None):
elif state == 'absent' and origin_access_identity_id is not None and e_tag is not None:
result = service_mgr.delete_origin_access_identity(origin_access_identity_id, e_tag)
changed = True

Expand Down
6 changes: 2 additions & 4 deletions plugins/modules/cloudtrail.py
Original file line number Diff line number Diff line change
Expand Up @@ -266,7 +266,7 @@ def get_kms_key_aliases(module, client, keyId):
"""
try:
key_resp = client.list_aliases(KeyId=keyId)
except (BotoCoreError, ClientError) as err:
except (BotoCoreError, ClientError):
# Don't fail here, just return [] to maintain backwards compat
# in case user doesn't have kms:ListAliases permissions
return []
Expand Down Expand Up @@ -558,9 +558,7 @@ def main():
# all aliases for a match.
initial_aliases = get_kms_key_aliases(module, module.client('kms'), initial_kms_key_id)
for a in initial_aliases:
if(a['AliasName'] == new_key or
a['AliasArn'] == new_key or
a['TargetKeyId'] == new_key):
if a['AliasName'] == new_key or a['AliasArn'] == new_key or a['TargetKeyId'] == new_key:
results['changed'] = False

# Check if we need to start/stop logging
Expand Down
5 changes: 2 additions & 3 deletions plugins/modules/ec2_vpc_nacl.py
Original file line number Diff line number Diff line change
Expand Up @@ -235,7 +235,6 @@ def tags_changed(nacl_id, client, module):
tags = module.params.get('tags')
name = module.params.get('name')
purge_tags = module.params.get('purge_tags')
changed = False

if name is None and tags is None:
return False
Expand Down Expand Up @@ -337,14 +336,14 @@ def setup_network_acl(client, module):
replace_network_acl_association(nacl_id, subnets, client, module)
construct_acl_entries(nacl, client, module)
changed = True
return(changed, nacl['NetworkAcl']['NetworkAclId'])
return changed, nacl['NetworkAcl']['NetworkAclId']
else:
changed = False
nacl_id = nacl['NetworkAcls'][0]['NetworkAclId']
changed |= subnets_changed(nacl, client, module)
changed |= nacls_changed(nacl, client, module)
changed |= tags_changed(nacl_id, client, module)
return (changed, nacl_id)
return changed, nacl_id


def remove_network_acl(client, module):
Expand Down
2 changes: 1 addition & 1 deletion plugins/modules/eks_fargate_profile.py
Original file line number Diff line number Diff line change
Expand Up @@ -185,7 +185,7 @@ def validate_tags(client, module, fargate_profile):
try:
existing_tags = client.list_tags_for_resource(resourceArn=fargate_profile['fargateProfileArn'])['tags']
tags_to_add, tags_to_remove = compare_aws_tags(existing_tags, desired_tags, module.params.get('purge_tags'))
except(botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json_aws(e, msg='Unable to list or compare tags for Fargate Profile %s' % module.params.get('name'))

if tags_to_remove:
Expand Down
13 changes: 5 additions & 8 deletions plugins/modules/redshift.py
Original file line number Diff line number Diff line change
Expand Up @@ -453,7 +453,7 @@ def create_cluster(module, redshift):
changed = True
resource = _describe_cluster(redshift, identifier)

return(changed, _collect_facts(resource))
return changed, _collect_facts(resource)


def describe_cluster(module, redshift):
Expand All @@ -470,7 +470,7 @@ def describe_cluster(module, redshift):
except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e:
module.fail_json_aws(e, msg="Error describing cluster")

return(True, _collect_facts(resource))
return True, _collect_facts(resource)


def delete_cluster(module, redshift):
Expand Down Expand Up @@ -499,7 +499,7 @@ def delete_cluster(module, redshift):
ClusterIdentifier=identifier,
**snake_dict_to_camel_dict(params, capitalize_first=True))
except is_boto3_error_code('ClusterNotFound'):
return(False, {})
return False, {}
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e: # pylint: disable=duplicate-except
module.fail_json_aws(e, msg="Failed to delete cluster")

Expand All @@ -514,7 +514,7 @@ def delete_cluster(module, redshift):
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json_aws(e, msg="Timeout deleting the cluster")

return(True, {})
return True, {}


def modify_cluster(module, redshift):
Expand All @@ -528,9 +528,6 @@ def modify_cluster(module, redshift):
identifier = module.params.get('identifier')
wait = module.params.get('wait')
wait_timeout = module.params.get('wait_timeout')
tags = module.params.get('tags')
purge_tags = module.params.get('purge_tags')
region = region = module.params.get('region')

# Package up the optional parameters
params = {}
Expand Down Expand Up @@ -594,7 +591,7 @@ def modify_cluster(module, redshift):
if _ensure_tags(redshift, identifier, resource['Tags'], module):
resource = redshift.describe_clusters(ClusterIdentifier=identifier)['Clusters'][0]

return(True, _collect_facts(resource))
return True, _collect_facts(resource)


def main():
Expand Down
29 changes: 10 additions & 19 deletions plugins/modules/s3_bucket_info.py
Original file line number Diff line number Diff line change
Expand Up @@ -444,7 +444,7 @@ def get_bucket_list(module, connection, name="", name_filter=""):
final_buckets = filtered_buckets
else:
final_buckets = buckets
return(final_buckets)
return final_buckets


def get_buckets_facts(connection, buckets, requested_facts, transform_location):
Expand All @@ -457,7 +457,7 @@ def get_buckets_facts(connection, buckets, requested_facts, transform_location):
bucket.update(get_bucket_details(connection, bucket['name'], requested_facts, transform_location))
full_bucket_list.append(bucket)

return(full_bucket_list)
return full_bucket_list


def get_bucket_details(connection, name, requested_facts, transform_location):
Expand Down Expand Up @@ -490,7 +490,7 @@ def get_bucket_details(connection, name, requested_facts, transform_location):
except botocore.exceptions.ClientError:
pass

return(all_facts)
return all_facts


@AWSRetry.jittered_backoff(max_delay=120, catch_extra_error_codes=['NoSuchBucket', 'OperationAborted'])
Expand All @@ -508,11 +508,8 @@ def get_bucket_location(name, connection, transform_location=False):
except KeyError:
pass
# Strip response metadata (not needed)
try:
data.pop('ResponseMetadata')
return(data)
except KeyError:
return(data)
data.pop('ResponseMetadata', None)
return data


@AWSRetry.jittered_backoff(max_delay=120, catch_extra_error_codes=['NoSuchBucket', 'OperationAborted'])
Expand All @@ -524,14 +521,11 @@ def get_bucket_tagging(name, connection):

try:
bucket_tags = boto3_tag_list_to_ansible_dict(data['TagSet'])
return(bucket_tags)
return bucket_tags
except KeyError:
# Strip response metadata (not needed)
try:
data.pop('ResponseMetadata')
return(data)
except KeyError:
return(data)
data.pop('ResponseMetadata', None)
return data


@AWSRetry.jittered_backoff(max_delay=120, catch_extra_error_codes=['NoSuchBucket', 'OperationAborted'])
Expand All @@ -544,11 +538,8 @@ def get_bucket_property(name, connection, get_api_name):
data = api_function(Bucket=name)

# Strip response metadata (not needed)
try:
data.pop('ResponseMetadata')
return(data)
except KeyError:
return(data)
data.pop('ResponseMetadata', None)
return data


def main():
Expand Down
6 changes: 3 additions & 3 deletions tests/unit/plugins/connection/test_aws_ssm.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ def test_plugins_connection_aws_ssm_wrap_command(self):
conn = connection_loader.get('community.aws.aws_ssm', pc, new_stdin)
conn.is_windows = MagicMock()
conn.is_windows.return_value = True
return('windows1')
return 'windows1'

def test_plugins_connection_aws_ssm_post_process(self):
pc = PlayContext()
Expand All @@ -103,7 +103,7 @@ def test_plugins_connection_aws_ssm_post_process(self):
fail = 2
conn.stdout = MagicMock()
returncode = 0
return(returncode, conn.stdout)
return returncode, conn.stdout

@patch('subprocess.Popen')
def test_plugins_connection_aws_ssm_flush_stderr(self, s_popen):
Expand All @@ -114,7 +114,7 @@ def test_plugins_connection_aws_ssm_flush_stderr(self, s_popen):
conn.poll_stderr.register = MagicMock()
conn.stderr = None
s_popen.poll().return_value = 123
return(conn.stderr)
return conn.stderr

@patch('boto3.client')
def test_plugins_connection_aws_ssm_get_url(self, boto):
Expand Down
2 changes: 1 addition & 1 deletion tests/unit/plugins/modules/test_acm_certificate.py
Original file line number Diff line number Diff line change
Expand Up @@ -122,4 +122,4 @@ def test_chain_compare():
if expected != actual:
print("Error, unexpected comparison result between \n%s\nand\n%s" % (chain_a['path'], chain_b['path']))
print("Expected %s got %s" % (str(expected), str(actual)))
assert(expected == actual)
assert expected == actual

0 comments on commit 3d4736b

Please sign in to comment.