Skip to content

Commit

Permalink
Minor linting fixups - 2022-08-12 (#1408)
Browse files Browse the repository at this point in the history
Minor linting fixups - 2022-08-12

SUMMARY
Various linting and unit test fixups

unused variables
overly broad Exception catching (highlighted some broken tests)
removes direct use of unittest in favour of pytest (see also ansible-collections/amazon.aws#961)
cleans up skipping of tests when botocore/boto3 aren't installed
passes error message from VPNConnectionException into its super to make testing easier, should never be directly exposed to the user

Removes tests for 3 modules which now have integration tests; they're either recording-based (fragile) or test things which are separately tested in the integration tests.

lambda
s3_bucket_notifications
route53_zone

ISSUE TYPE

Feature Pull Request

COMPONENT NAME
plugins/modules/ec2_vpc_vpn.py
tests/unit
ADDITIONAL INFORMATION

Reviewed-by: Alina Buzachis <None>
  • Loading branch information
tremble authored Aug 17, 2022
1 parent 3d4736b commit 8b4afa9
Show file tree
Hide file tree
Showing 16 changed files with 192 additions and 1,190 deletions.
3 changes: 3 additions & 0 deletions changelogs/fragments/1407-ec2_vpc_vpn.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
minor_changes:
- ec2_vpc_vpn - minor tweak to ``VPNConnectionException`` to pass message through to the superclass
(https://github.com/ansible-collections/community.aws/pull/1407).
1 change: 1 addition & 0 deletions plugins/modules/ec2_vpc_vpn.py
Original file line number Diff line number Diff line change
Expand Up @@ -305,6 +305,7 @@

class VPNConnectionException(Exception):
    """Signal a VPN connection problem.

    Keeps the human-readable message on ``msg`` and the underlying
    exception (if any) on ``exception`` for callers that want to
    inspect the original failure.
    """

    def __init__(self, msg, exception=None):
        self.msg = msg
        self.exception = exception
        # Hand the message to Exception as well so str(e) exposes it
        # directly, which keeps testing simple.
        super().__init__(msg)

Expand Down
2 changes: 1 addition & 1 deletion plugins/modules/lambda_event.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@
enabled:
description:
- Indicates whether AWS Lambda should begin polling or reading from the event source.
default: true.
default: true
type: bool
batch_size:
description:
Expand Down
47 changes: 20 additions & 27 deletions tests/unit/plugins/connection/test_aws_ssm.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,15 +4,18 @@

from io import StringIO
import pytest
import sys
from ansible_collections.community.aws.tests.unit.compat import unittest

from ansible_collections.community.aws.tests.unit.compat.mock import patch, MagicMock
from ansible.playbook.play_context import PlayContext
from ansible.plugins.loader import connection_loader

from ansible_collections.amazon.aws.plugins.module_utils.botocore import HAS_BOTO3

if not HAS_BOTO3:
pytestmark = pytest.mark.skip("test_data_pipeline.py requires the python modules 'boto3' and 'botocore'")


@pytest.mark.skipif(sys.version_info < (2, 7), reason="requires Python 2.7 or higher")
class TestConnectionBaseClass(unittest.TestCase):
class TestConnectionBaseClass():

@patch('os.path.exists')
@patch('subprocess.Popen')
Expand Down Expand Up @@ -55,25 +58,19 @@ def test_plugins_connection_aws_ssm_exec_command(self, r_choice):
conn._flush_stderr = MagicMock()
conn._windows = MagicMock()
conn._windows.return_value = True
sudoable = True
conn._session.poll = MagicMock()
conn._session.poll.return_value = None
remaining = 0
conn._timeout = MagicMock()
conn._poll_stdout = MagicMock()
conn._poll_stdout.poll = MagicMock()
conn._poll_stdout.poll.return_value = True
conn._session.stdout = MagicMock()
conn._session.stdout.readline = MagicMock()
begin = True
mark_end = 'a'
line = ['a', 'b']
conn._post_process = MagicMock()
conn._post_process.return_value = 'test'
conn._session.stdout.readline.side_effect = iter(['aaaaa\n', 'Hi\n', '0\n', 'bbbbb\n'])
conn.get_option = MagicMock()
conn.get_option.return_value = 1
cmd = MagicMock()
returncode = 'a'
stdout = 'b'
return (returncode, stdout, conn._flush_stderr)
Expand All @@ -99,8 +96,6 @@ def test_plugins_connection_aws_ssm_post_process(self):
conn = connection_loader.get('community.aws.aws_ssm', pc, new_stdin)
conn.is_windows = MagicMock()
conn.is_windows.return_value = True
success = 3
fail = 2
conn.stdout = MagicMock()
returncode = 0
return returncode, conn.stdout
Expand All @@ -116,15 +111,15 @@ def test_plugins_connection_aws_ssm_flush_stderr(self, s_popen):
s_popen.poll().return_value = 123
return conn.stderr

@patch('boto3.client')
def test_plugins_connection_aws_ssm_get_url(self, boto):
pc = PlayContext()
new_stdin = StringIO()
conn = connection_loader.get('community.aws.aws_ssm', pc, new_stdin)
boto3 = MagicMock()
boto3.client('s3').return_value = MagicMock()
boto3.generate_presigned_url.return_value = MagicMock()
return (boto3.generate_presigned_url.return_value)
# XXX This isn't doing anything
# def test_plugins_connection_aws_ssm_get_url(self):
# pc = PlayContext()
# new_stdin = StringIO()
# connection_loader.get('community.aws.aws_ssm', pc, new_stdin)
# boto3 = MagicMock()
# boto3.client('s3').return_value = MagicMock()
# boto3.generate_presigned_url.return_value = MagicMock()
# return (boto3.generate_presigned_url.return_value)

@patch('os.path.exists')
def test_plugins_connection_aws_ssm_put_file(self, mock_ospe):
Expand All @@ -134,7 +129,7 @@ def test_plugins_connection_aws_ssm_put_file(self, mock_ospe):
conn._connect = MagicMock()
conn._file_transport_command = MagicMock()
conn._file_transport_command.return_value = (0, 'stdout', 'stderr')
res, stdout, stderr = conn.put_file('/in/file', '/out/file')
conn.put_file('/in/file', '/out/file')

def test_plugins_connection_aws_ssm_fetch_file(self):
pc = PlayContext()
Expand All @@ -143,7 +138,7 @@ def test_plugins_connection_aws_ssm_fetch_file(self):
conn._connect = MagicMock()
conn._file_transport_command = MagicMock()
conn._file_transport_command.return_value = (0, 'stdout', 'stderr')
res, stdout, stderr = conn.fetch_file('/in/file', '/out/file')
conn.fetch_file('/in/file', '/out/file')

@patch('subprocess.check_output')
@patch('boto3.client')
Expand All @@ -158,15 +153,13 @@ def test_plugins_connection_file_transport_command(self, boto_client, s_check_ou
boto3 = MagicMock()
boto3.client('s3').return_value = MagicMock()
conn.get_option.return_value = 1
ssm_action = 'get'
get_command = MagicMock()
put_command = MagicMock()
conn.exec_command = MagicMock()
conn.exec_command.return_value = (put_command, None, False)
conn.download_fileobj = MagicMock()
(returncode, stdout, stderr) = conn.exec_command(put_command, in_data=None, sudoable=False)
returncode = 0
(returncode, stdout, stderr) = conn.exec_command(get_command, in_data=None, sudoable=False)
conn.exec_command(put_command, in_data=None, sudoable=False)
conn.exec_command(get_command, in_data=None, sudoable=False)

@patch('subprocess.check_output')
def test_plugins_connection_aws_ssm_close(self, s_check_output):
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
{
"status_code": 200,
"status_code": 400,
"data": {
"Error": {
"Message": "Pipeline definition is empty: ",
"Code": "InvalidRequestException"
},
"ResponseMetadata": {
"RequestId": "f6f52c07-68c8-11e7-b9c1-53eb374c7a33",
"HTTPHeaders": {
Expand All @@ -10,7 +14,7 @@
"content-length": "2"
},
"RetryAttempts": 0,
"HTTPStatusCode": 200
"HTTPStatusCode": 400
}
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -5,23 +5,59 @@
"CustomerGatewayId": "cgw-6113c87f",
"VpnConnectionId": "vpn-9f06e28a",
"Category": "VPN",
"CustomerGatewayConfiguration": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<vpn_connection id=\"vpn-9f06e28a\">\n <customer_gateway_id>cgw-6113c87f</customer_gateway_id>\n <vpn_gateway_id>vgw-35d70c2b</vpn_gateway_id>\n <vpn_connection_type>ipsec.1</vpn_connection_type>\n <vpn_connection_attributes>NoBGPVPNConnection</vpn_connection_attributes>\n <ipsec_tunnel>\n <customer_gateway>\n <tunnel_outside_address>\n <ip_address>9.8.7.6</ip_address>\n </tunnel_outside_address>\n <tunnel_inside_address>\n <ip_address>169.254.15.114</ip_address>\n <network_mask>255.255.255.252</network_mask>\n <network_cidr>30</network_cidr>\n </tunnel_inside_address>\n </customer_gateway>\n <vpn_gateway>\n <tunnel_outside_address>\n <ip_address>52.43.202.248</ip_address>\n </tunnel_outside_address>\n <tunnel_inside_address>\n <ip_address>169.254.15.113</ip_address>\n <network_mask>255.255.255.252</network_mask>\n <network_cidr>30</network_cidr>\n </tunnel_inside_address>\n </vpn_gateway>\n <ike>\n <authentication_protocol>sha1</authentication_protocol>\n <encryption_protocol>aes-128-cbc</encryption_protocol>\n <lifetime>28800</lifetime>\n <perfect_forward_secrecy>group2</perfect_forward_secrecy>\n <mode>main</mode>\n <pre_shared_key>spNJGPBQfzbK8hNvpHNOaaml_paRZNKs</pre_shared_key>\n </ike>\n <ipsec>\n <protocol>esp</protocol>\n <authentication_protocol>hmac-sha1-96</authentication_protocol>\n <encryption_protocol>aes-128-cbc</encryption_protocol>\n <lifetime>3600</lifetime>\n <perfect_forward_secrecy>group2</perfect_forward_secrecy>\n <mode>tunnel</mode>\n <clear_df_bit>true</clear_df_bit>\n <fragmentation_before_encryption>true</fragmentation_before_encryption>\n <tcp_mss_adjustment>1379</tcp_mss_adjustment>\n <dead_peer_detection>\n <interval>10</interval>\n <retries>3</retries>\n </dead_peer_detection>\n </ipsec>\n </ipsec_tunnel>\n <ipsec_tunnel>\n <customer_gateway>\n <tunnel_outside_address>\n <ip_address>9.8.7.6</ip_address>\n </tunnel_outside_address>\n <tunnel_inside_address>\n 
<ip_address>169.254.14.126</ip_address>\n <network_mask>255.255.255.252</network_mask>\n <network_cidr>30</network_cidr>\n </tunnel_inside_address>\n </customer_gateway>\n <vpn_gateway>\n <tunnel_outside_address>\n <ip_address>54.70.185.193</ip_address>\n </tunnel_outside_address>\n <tunnel_inside_address>\n <ip_address>169.254.14.125</ip_address>\n <network_mask>255.255.255.252</network_mask>\n <network_cidr>30</network_cidr>\n </tunnel_inside_address>\n </vpn_gateway>\n <ike>\n <authentication_protocol>sha1</authentication_protocol>\n <encryption_protocol>aes-128-cbc</encryption_protocol>\n <lifetime>28800</lifetime>\n <perfect_forward_secrecy>group2</perfect_forward_secrecy>\n <mode>main</mode>\n <pre_shared_key>yE7hnuavW4SzersgnMyKIoKbd0rE8giW</pre_shared_key>\n </ike>\n <ipsec>\n <protocol>esp</protocol>\n <authentication_protocol>hmac-sha1-96</authentication_protocol>\n <encryption_protocol>aes-128-cbc</encryption_protocol>\n <lifetime>3600</lifetime>\n <perfect_forward_secrecy>group2</perfect_forward_secrecy>\n <mode>tunnel</mode>\n <clear_df_bit>true</clear_df_bit>\n <fragmentation_before_encryption>true</fragmentation_before_encryption>\n <tcp_mss_adjustment>1379</tcp_mss_adjustment>\n <dead_peer_detection>\n <interval>10</interval>\n <retries>3</retries>\n </dead_peer_detection>\n </ipsec>\n </ipsec_tunnel>\n</vpn_connection>",
"Routes": [],
"Options": {
"StaticRoutesOnly": true
},
"Type": "ipsec.1",
"VgwTelemetry": [
{
"StatusMessage": "",
"Status": "DOWN",
"OutsideIpAddress": "52.43.202.248",
"AcceptedRouteCount": 0,
"LastStatusChange": {
"year": 2018,
"hour": 13,
"second": 3,
"minute": 9,
"__class__": "datetime",
"day": 16,
"month": 4,
"microsecond": 0
}
},
{
"StatusMessage": "",
"Status": "DOWN",
"OutsideIpAddress": "54.70.185.193",
"AcceptedRouteCount": 0,
"LastStatusChange": {
"year": 2018,
"hour": 13,
"second": 26,
"minute": 8,
"__class__": "datetime",
"day": 16,
"month": 4,
"microsecond": 0
}
}
],
"VpnGatewayId": "vgw-35d70c2b",
"State": "deleted"
"State": "available"
}
],
"ResponseMetadata": {
"HTTPStatusCode": 200,
"RequestId": "edac5b19-14be-4b2d-ab47-a2279de47d40",
"RequestId": "1005f435-3e86-461f-a96c-a4f45c32c795",
"HTTPHeaders": {
"content-length": "705",
"vary": "Accept-Encoding",
"content-length": "6124",
"server": "AmazonEC2",
"content-type": "text/xml;charset=UTF-8",
"date": "Mon, 16 Apr 2018 13:09:25 GMT"
"date": "Mon, 16 Apr 2018 13:09:24 GMT"
},
"RetryAttempts": 0
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
{
"data": {
"VpnConnections": [
{
"CustomerGatewayId": "cgw-6113c87f",
"VpnConnectionId": "vpn-9f06e28a",
"Category": "VPN",
"Routes": [],
"Options": {
"StaticRoutesOnly": true
},
"Type": "ipsec.1",
"VpnGatewayId": "vgw-35d70c2b",
"State": "deleted"
}
],
"ResponseMetadata": {
"HTTPStatusCode": 200,
"RequestId": "edac5b19-14be-4b2d-ab47-a2279de47d40",
"HTTPHeaders": {
"content-length": "705",
"server": "AmazonEC2",
"content-type": "text/xml;charset=UTF-8",
"date": "Mon, 16 Apr 2018 13:09:25 GMT"
},
"RetryAttempts": 0
}
},
"status_code": 200
}
38 changes: 25 additions & 13 deletions tests/unit/plugins/modules/test_data_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,14 +14,24 @@

from ansible.module_utils._text import to_text

try:
import boto3
except ImportError:
pass

# Magic... Incorrectly identified by pylint as unused
from ansible_collections.amazon.aws.tests.unit.utils.amazon_placebo_fixtures import maybe_sleep # pylint: disable=unused-import
from ansible_collections.amazon.aws.tests.unit.utils.amazon_placebo_fixtures import placeboify # pylint: disable=unused-import

from ansible_collections.amazon.aws.plugins.module_utils.botocore import HAS_BOTO3
from ansible_collections.community.aws.plugins.modules import data_pipeline

# test_api_gateway.py requires the `boto3` and `botocore` modules
boto3 = pytest.importorskip('boto3')
if not HAS_BOTO3:
pytestmark = pytest.mark.skip("test_data_pipeline.py requires the python modules 'boto3' and 'botocore'")


class FailException(Exception):
    """Distinct exception type raised in place of a real module failure.

    Using a dedicated subclass lets tests assert on this failure mode
    specifically instead of catching a bare ``Exception``.
    """


@pytest.fixture(scope='module')
Expand Down Expand Up @@ -62,7 +72,7 @@ def dp_setup():
yield Dependencies(module=module, data_pipeline_id='df-0590406117G8DPQZY2HA', objects=objects)
else:
connection = boto3.client('datapipeline')
changed, result = data_pipeline.create_pipeline(connection, module)
_changed, result = data_pipeline.create_pipeline(connection, module)
data_pipeline_id = result['data_pipeline']['pipeline_id']
yield Dependencies(module=module, data_pipeline_id=data_pipeline_id, objects=objects)

Expand All @@ -79,7 +89,7 @@ def __init__(self, **kwargs):
def fail_json(self, *args, **kwargs):
self.exit_args = args
self.exit_kwargs = kwargs
raise Exception('FAIL')
raise FailException('FAIL')

def exit_json(self, *args, **kwargs):
self.exit_args = args
Expand All @@ -102,20 +112,23 @@ def test_pipeline_field(placeboify, maybe_sleep, dp_setup):
def test_define_pipeline(placeboify, maybe_sleep, dp_setup):
connection = placeboify.client('datapipeline')
changed, result = data_pipeline.define_pipeline(connection, dp_setup.module, dp_setup.objects, dp_setup.data_pipeline_id)
assert changed is True
assert 'has been updated' in result


def test_deactivate_pipeline(placeboify, maybe_sleep, dp_setup):
connection = placeboify.client('datapipeline')
changed, result = data_pipeline.deactivate_pipeline(connection, dp_setup.module)
_changed, result = data_pipeline.deactivate_pipeline(connection, dp_setup.module)
# XXX possible bug
# assert changed is True
assert "Data Pipeline ansible-test-create-pipeline deactivated" in result['msg']


def test_activate_without_population(placeboify, maybe_sleep, dp_setup):
connection = placeboify.client('datapipeline')
with pytest.raises(Exception) as error_message:
changed, result = data_pipeline.activate_pipeline(connection, dp_setup.module)
assert error_message == "You need to populate your pipeline before activation."
with pytest.raises(FailException):
_changed, _result = data_pipeline.activate_pipeline(connection, dp_setup.module)
assert dp_setup.module.exit_kwargs.get('msg') == "You need to populate your pipeline before activation."


def test_create_pipeline(placeboify, maybe_sleep):
Expand Down Expand Up @@ -157,7 +170,7 @@ def test_delete_nonexistent_pipeline(placeboify, maybe_sleep):
'tags': {'ansible': 'test'},
'timeout': 300}
m = FakeModule(**params)
changed, result = data_pipeline.delete_pipeline(connection, m)
changed, _result = data_pipeline.delete_pipeline(connection, m)
assert changed is False


Expand All @@ -171,7 +184,7 @@ def test_delete_pipeline(placeboify, maybe_sleep):
'timeout': 300}
m = FakeModule(**params)
data_pipeline.create_pipeline(connection, m)
changed, result = data_pipeline.delete_pipeline(connection, m)
changed, _result = data_pipeline.delete_pipeline(connection, m)
assert changed is True


Expand Down Expand Up @@ -217,9 +230,8 @@ def test_pipeline_description(placeboify, maybe_sleep, dp_setup):
def test_pipeline_description_nonexistent(placeboify, maybe_sleep):
hypothetical_pipeline_id = "df-015440025PF7YGLDK47C"
connection = placeboify.client('datapipeline')
with pytest.raises(Exception) as error:
with pytest.raises(data_pipeline.DataPipelineNotFound):
data_pipeline.pipeline_description(connection, hypothetical_pipeline_id)
assert error == data_pipeline.DataPipelineNotFound


def test_check_dp_exists_true(placeboify, maybe_sleep, dp_setup):
Expand Down Expand Up @@ -249,5 +261,5 @@ def test_activate_pipeline(placeboify, maybe_sleep, dp_setup):
module=dp_setup.module,
objects=dp_setup.objects,
dp_id=dp_setup.data_pipeline_id)
changed, result = data_pipeline.activate_pipeline(connection, dp_setup.module)
changed, _result = data_pipeline.activate_pipeline(connection, dp_setup.module)
assert changed is True
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ def test_associations_are_not_updated(placeboify, maybe_sleep):
def test_create_and_delete(placeboify, maybe_sleep):
client = placeboify.client('directconnect')
created_conn = verify_create_works(placeboify, maybe_sleep, client)
deleted_conn = verify_delete_works(placeboify, maybe_sleep, client, created_conn)
verify_delete_works(placeboify, maybe_sleep, client, created_conn)


def verify_create_works(placeboify, maybe_sleep, client):
Expand Down
Loading

0 comments on commit 8b4afa9

Please sign in to comment.