From 709b8603c072cf74f136daa640756eccba6849f9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pedro=20Magalh=C3=A3es?=
Date: Mon, 31 Aug 2020 10:48:26 +0100
Subject: [PATCH] AWS ELB: Return empty list when no load balancer name was
 found

When trying to describe a load balancer that doesn't exist, the module
crashes. Instead of that behaviour, this commit makes the module return
an empty list when no load balancer is found, so that subsequent tasks
can act on the module's output.
---
 ...406-elb_classic_info-return-empty-list.yml |   2 +
 plugins/modules/elb_classic_lb_info.py        |  32 +++--
 .../targets/elb_classic_lb/aliases            |   1 +
 .../targets/elb_classic_lb/tasks/main.yml     | 127 +++++++++++++++++-
 4 files changed, 149 insertions(+), 13 deletions(-)
 create mode 100644 changelogs/fragments/406-elb_classic_info-return-empty-list.yml

diff --git a/changelogs/fragments/406-elb_classic_info-return-empty-list.yml b/changelogs/fragments/406-elb_classic_info-return-empty-list.yml
new file mode 100644
index 00000000000..62eb1c30d13
--- /dev/null
+++ b/changelogs/fragments/406-elb_classic_info-return-empty-list.yml
@@ -0,0 +1,2 @@
+minor_changes:
+- elb_classic_info - If the provided load balancer doesn't exist, return an empty list instead of throwing an error. (https://github.com/ansible-collections/community.aws/pull/215).
diff --git a/plugins/modules/elb_classic_lb_info.py b/plugins/modules/elb_classic_lb_info.py
index 12a6a43771a..a1a0c39e042 100644
--- a/plugins/modules/elb_classic_lb_info.py
+++ b/plugins/modules/elb_classic_lb_info.py
@@ -142,7 +142,7 @@
   vpc_id: vpc-c248fda4
 '''
 
-from ansible_collections.amazon.aws.plugins.module_utils.core import AnsibleAWSModule
+from ansible_collections.amazon.aws.plugins.module_utils.core import AnsibleAWSModule, is_boto3_error_code
 from ansible_collections.amazon.aws.plugins.module_utils.ec2 import (
     AWSRetry,
     camel_dict_to_snake_dict,
@@ -154,14 +154,17 @@
 except ImportError:
     pass  # caught by AnsibleAWSModule
 
+MAX_AWS_RETRIES = 5
+MAX_AWS_DELAY = 5
+
 
-@AWSRetry.backoff(tries=5, delay=5, backoff=2.0)
-def list_elbs(connection, names):
-    paginator = connection.get_paginator('describe_load_balancers')
-    load_balancers = paginator.paginate(LoadBalancerNames=names).build_full_result().get('LoadBalancerDescriptions', [])
 
+def list_elbs(connection, load_balancer_names):
     results = []
-    for lb in load_balancers:
+    for load_balancer_name in load_balancer_names:
+        lb = get_lb(connection, load_balancer_name)
+        if not lb:
+            continue
         description = camel_dict_to_snake_dict(lb)
         name = lb['LoadBalancerName']
         instances = lb.get('Instances', [])
@@ -174,13 +177,20 @@
     return results
 
 
-def get_lb_attributes(connection, name):
-    attributes = connection.describe_load_balancer_attributes(LoadBalancerName=name).get('LoadBalancerAttributes', {})
+def get_lb(connection, load_balancer_name):
+    try:
+        return connection.describe_load_balancers(aws_retry=True, LoadBalancerNames=[load_balancer_name])['LoadBalancerDescriptions'][0]
+    except is_boto3_error_code('LoadBalancerNotFound'):
+        return []
+
+
+def get_lb_attributes(connection, load_balancer_name):
+    attributes = connection.describe_load_balancer_attributes(aws_retry=True, LoadBalancerName=load_balancer_name).get('LoadBalancerAttributes', {})
     return camel_dict_to_snake_dict(attributes)
 
 
 def get_tags(connection, load_balancer_name):
-    tags = connection.describe_tags(LoadBalancerNames=[load_balancer_name])['TagDescriptions']
+    tags = connection.describe_tags(aws_retry=True, LoadBalancerNames=[load_balancer_name])['TagDescriptions']
     if not tags:
         return {}
     return boto3_tag_list_to_ansible_dict(tags[0]['Tags'])
@@ -194,14 +204,14 @@ def lb_instance_health(connection, load_balancer_name, instances, state):
 def main():
     argument_spec = dict(
-        names={'default': [], 'type': 'list', 'elements': 'str'}
+        names=dict(default=[], type='list', elements='str')
     )
     module = AnsibleAWSModule(argument_spec=argument_spec,
                               supports_check_mode=True)
 
     if module._name == 'elb_classic_lb_facts':
         module.deprecate("The 'elb_classic_lb_facts' module has been renamed to 'elb_classic_lb_info'", date='2021-12-01', collection_name='community.aws')
 
-    connection = module.client('elb')
+    connection = module.client('elb', retry_decorator=AWSRetry.jittered_backoff(retries=MAX_AWS_RETRIES, delay=MAX_AWS_DELAY))
 
     try:
         elbs = list_elbs(connection, module.params.get('names'))
diff --git a/tests/integration/targets/elb_classic_lb/aliases b/tests/integration/targets/elb_classic_lb/aliases
index 6e3860bee23..2468bb7a199 100644
--- a/tests/integration/targets/elb_classic_lb/aliases
+++ b/tests/integration/targets/elb_classic_lb/aliases
@@ -1,2 +1,3 @@
+elb_classic_lb_info
 cloud/aws
 shippable/aws/group2
diff --git a/tests/integration/targets/elb_classic_lb/tasks/main.yml b/tests/integration/targets/elb_classic_lb/tasks/main.yml
index 2b368c6f9e8..b0be664a984 100644
--- a/tests/integration/targets/elb_classic_lb/tasks/main.yml
+++ b/tests/integration/targets/elb_classic_lb/tasks/main.yml
@@ -13,6 +13,9 @@
 # remove listeners
 # remove elb
 
+# __elb_classic_lb_info_
+# get nonexistent load balancer
+
 # __ec2-common__
 # test environment variable EC2_REGION
 # test with no parameters
@@ -71,6 +74,32 @@
          - '[80, 80, "HTTP", "HTTP"] in info.elb.listeners'
          - '[8080, 8080, "HTTP", "HTTP"] in info.elb.listeners'
 
+    - name: Get ELB info
+      elb_classic_lb_info:
+        names: "{{ tag_prefix }}"
+        region: "{{ ec2_region }}"
+        ec2_access_key: "{{ ec2_access_key }}"
+        ec2_secret_key: "{{ ec2_secret_key }}"
+        security_token: "{{ security_token }}"
+      register: info
+
+    - assert:
+        that:
+          - 'info.elbs|length == 1'
+          - 'elb.availability_zones|length == 2'
+          - '"{{ ec2_region }}a" in elb.availability_zones'
+          - '"{{ ec2_region }}b" in elb.availability_zones'
+          - 'elb.health_check.healthy_threshold == 10'
+          - 'elb.health_check.interval == 30'
+          - 'elb.health_check.target == "HTTP:80/index.html"'
+          - 'elb.health_check.timeout == 5'
+          - 'elb.health_check.unhealthy_threshold == 2'
+          - '{"instance_port": 80, "instance_protocol": "HTTP", "load_balancer_port": 80, "protocol": "HTTP"} == listeners[0]'
+          - '{"instance_port": 8080, "instance_protocol": "HTTP", "load_balancer_port": 8080, "protocol": "HTTP"} == listeners[1]'
+      vars:
+        elb: "{{ info.elbs[0] }}"
+        listeners: "{{ elb.listener_descriptions|map(attribute='listener')|sort(attribute='load_balancer_port') }}"
+
     # ============================================================
 
     # check ports, would be cool, but we are at the mercy of AWS
@@ -119,14 +148,28 @@
         healthy_threshold: 10
       register: info
 
-
-
     - assert:
        that:
          - 'info.elb.status == "ok"'
          - 'info.changed'
          - 'info.elb.zones[0] == "{{ ec2_region }}c"'
 
+    - name: Get ELB info after changing AZ's
+      elb_classic_lb_info:
+        names: "{{ tag_prefix }}"
+        region: "{{ ec2_region }}"
+        ec2_access_key: "{{ ec2_access_key }}"
+        ec2_secret_key: "{{ ec2_secret_key }}"
+        security_token: "{{ security_token }}"
+      register: info
+
+    - assert:
+        that:
+          - 'elb.availability_zones|length == 1'
+          - '"{{ ec2_region }}c" in elb.availability_zones[0]'
+      vars:
+        elb: "{{ info.elbs[0] }}"
+
    # ============================================================
 
    # Update AZ's
@@ -158,6 +201,23 @@
          - '"{{ ec2_region }}b" in info.elb.zones'
          - '"{{ ec2_region }}c" in info.elb.zones'
 
+    - name: Get ELB info after updating AZ's
+      elb_classic_lb_info:
+        names: "{{ tag_prefix }}"
+        region: "{{ ec2_region }}"
+        ec2_access_key: "{{ ec2_access_key }}"
+        ec2_secret_key: "{{ ec2_secret_key }}"
+        security_token: "{{ security_token }}"
+      register: info
+
+    - assert:
+        that:
+          - 'elb.availability_zones|length == 3'
+          - '"{{ ec2_region }}a" in elb.availability_zones'
+          - '"{{ ec2_region }}b" in elb.availability_zones'
+          - '"{{ ec2_region }}c" in elb.availability_zones'
+      vars:
+        elb: "{{ info.elbs[0] }}"
 
    # ============================================================
 
@@ -189,6 +249,21 @@
          - '[80, 81, "HTTP", "HTTP"] in info.elb.listeners'
          - 'info.elb.listeners|length == 1'
 
+    - name: Get ELB info after purging listeners
+      elb_classic_lb_info:
+        names: "{{ tag_prefix }}"
+        region: "{{ ec2_region }}"
+        ec2_access_key: "{{ ec2_access_key }}"
+        ec2_secret_key: "{{ ec2_secret_key }}"
+        security_token: "{{ security_token }}"
+      register: info
+
+    - assert:
+        that:
+          - 'elb.listener_descriptions|length == 1'
+          - '{"instance_port": 81, "instance_protocol": "HTTP", "load_balancer_port": 80, "protocol": "HTTP"} == elb.listener_descriptions[0].listener'
+      vars:
+        elb: "{{ info.elbs[0] }}"
 
    # ============================================================
 
@@ -222,6 +297,54 @@
          - '[8081, 8081, "HTTP", "HTTP"] in info.elb.listeners'
          - 'info.elb.listeners|length == 2'
 
+    - name: Get ELB info after adding listeners
+      elb_classic_lb_info:
+        names: "{{ tag_prefix }}"
+        region: "{{ ec2_region }}"
+        ec2_access_key: "{{ ec2_access_key }}"
+        ec2_secret_key: "{{ ec2_secret_key }}"
+        security_token: "{{ security_token }}"
+      register: info
+
+    - assert:
+        that:
+          - 'elb.listener_descriptions|length == 2'
+          - '{"instance_port": 81, "instance_protocol": "HTTP", "load_balancer_port": 80, "protocol": "HTTP"} == listeners[0]'
+          - '{"instance_port": 8081, "instance_protocol": "HTTP", "load_balancer_port": 8081, "protocol": "HTTP"} == listeners[1]'
+      vars:
+        elb: "{{ info.elbs[0] }}"
+        listeners: "{{ elb.listener_descriptions|map(attribute='listener')|sort(attribute='load_balancer_port') }}"
+
+    # ============================================================
+
+    # Test getting nonexistent load balancer
+    - name: get nonexistent load balancer
+      elb_classic_lb_info:
+        names: "invalid-elb"
+        region: "{{ ec2_region }}"
+        ec2_access_key: "{{ ec2_access_key }}"
+        ec2_secret_key: "{{ ec2_secret_key }}"
+        security_token: "{{ security_token }}"
+      register: info
+
+    - assert:
+        that:
+          - 'info.elbs|length==0'
+
+    # Test getting a valid and nonexistent load balancer
+    - name: get nonexistent load balancer
+      elb_classic_lb_info:
+        names: ["{{ tag_prefix }}", "invalid-elb"]
+        region: "{{ ec2_region }}"
+        ec2_access_key: "{{ ec2_access_key }}"
+        ec2_secret_key: "{{ ec2_secret_key }}"
+        security_token: "{{ security_token }}"
+      register: info
+
+    - assert:
+        that:
+          - 'info.elbs|length==1'
+          - 'info.elbs[0].load_balancer_name == "{{ tag_prefix }}"'
 
    # ============================================================