From 7495fd3fe7b097c06c27b00d1fe73ca3d1a3821f Mon Sep 17 00:00:00 2001
From: Stewart Wallace
Date: Wed, 17 Nov 2021 09:07:47 +0000
Subject: [PATCH] Switching to logger rather than print

---
 .../configure_account_alias.py                | 11 +++--
 .../configure_account_ou.py                   |  5 ++-
 .../configure_account_tags.py                 | 11 +++--
 .../account_processing/create_account.py      |  5 ++-
 .../account_processing/delete_default_vpc.py  |  5 ++-
 .../account_processing/get_account_regions.py |  7 ++-
 .../process_account_files.py                  | 27 +++++++++---
 .../tests/test_account_file_processing.py     | 43 +++++++++++--------
 8 files changed, 77 insertions(+), 37 deletions(-)

diff --git a/src/lambda_codebase/account_processing/configure_account_alias.py b/src/lambda_codebase/account_processing/configure_account_alias.py
index 1252d634d..611ada96e 100644
--- a/src/lambda_codebase/account_processing/configure_account_alias.py
+++ b/src/lambda_codebase/account_processing/configure_account_alias.py
@@ -8,14 +8,16 @@
 import os
 from sts import STS
 from aws_xray_sdk.core import patch_all
+from logger import configure_logger
 
 patch_all()
 
+LOGGER = configure_logger(__name__)
 ADF_ROLE_NAME = os.getenv("ADF_ROLE_NAME")
 
 
 def create_account_alias(account, iam_client):
-    print(
+    LOGGER.info(
         f"Ensuring Account: {account.get('account_full_name')} has alias {account.get('alias')}"
     )
     try:
@@ -27,9 +29,6 @@ def create_account_alias(account, iam_client):
 
 def lambda_handler(event, _):
     if event.get("alias"):
-        print(
-            f"Ensuring Account: {event.get('account_full_name')} has alias {event.get('alias')}"
-        )
         sts = STS()
         account_id = event.get("Id")
         role = sts.assume_cross_account_role(
@@ -38,7 +37,7 @@ def lambda_handler(event, _):
         )
         create_account_alias(event, role.client("iam"))
     else:
-        print(
-            f"Ensuring Account: {event.get('account_full_name')} does not need an alias"
+        LOGGER.info(
+            f"Account: {event.get('account_full_name')} does not need an alias"
         )
     return event
diff --git a/src/lambda_codebase/account_processing/configure_account_ou.py b/src/lambda_codebase/account_processing/configure_account_ou.py
index 4e6b7821d..cf50d1f3c 100644
--- a/src/lambda_codebase/account_processing/configure_account_ou.py
+++ b/src/lambda_codebase/account_processing/configure_account_ou.py
@@ -7,12 +7,15 @@
 from organizations import Organizations
 import boto3
 from aws_xray_sdk.core import patch_all
+from logger import configure_logger
+
 
 patch_all()
+LOGGER = configure_logger(__name__)
 
 
 def lambda_handler(event, _):
-    print(
+    LOGGER.info(
         f"Ensuring Account: {event.get('account_full_name')} is in OU {event.get('organizational_unit_path')}"
     )
     organizations = Organizations(boto3)
diff --git a/src/lambda_codebase/account_processing/configure_account_tags.py b/src/lambda_codebase/account_processing/configure_account_tags.py
index 4db4022f1..960eff9fa 100644
--- a/src/lambda_codebase/account_processing/configure_account_tags.py
+++ b/src/lambda_codebase/account_processing/configure_account_tags.py
@@ -12,23 +12,26 @@
 import boto3
 
 from aws_xray_sdk.core import patch_all
+from logger import configure_logger
 
 patch_all()
+LOGGER = configure_logger(__name__)
+
 
 def create_account_tags(account_id, tags, org_session: Organizations):
+    LOGGER.info(
+        f"Ensuring Account: {account_id} has tags: {tags}"
+    )
     org_session.create_account_tags(account_id, tags)
 
 
 def lambda_handler(event, _):
     if event.get("tags"):
-        print(
-            f"Ensuring Account: {event.get('account_full_name')} has tags {event.get('tags')}"
-        )
         organizations = Organizations(boto3)
         create_account_tags(event.get("Id"), event.get("tags"),
                             organizations)
     else:
-        print(
+        LOGGER.info(
             f"Account: {event.get('account_full_name')} does not need tags configured"
         )
     return event
diff --git a/src/lambda_codebase/account_processing/create_account.py b/src/lambda_codebase/account_processing/create_account.py
index da1e7b387..9367566cb 100644
--- a/src/lambda_codebase/account_processing/create_account.py
+++ b/src/lambda_codebase/account_processing/create_account.py
@@ -8,12 +8,16 @@
 import os
 from aws_xray_sdk.core import patch_all
 import boto3
+from logger import configure_logger
 
 patch_all()
+
+LOGGER = configure_logger(__name__)
 ADF_ROLE_NAME = os.getenv("ADF_ROLE_NAME")
 
 
 def create_account(account, adf_role_name, org_client):
+    LOGGER.info(f"Creating account {account.get('account_full_name')}")
     allow_billing = "ALLOW" if account.get("allow_billing", False) else "DENY"
     response = org_client.create_account(
         Email=account.get("email"),
@@ -35,6 +39,5 @@ def create_account(account, adf_role_name, org_client):
 
 
 def lambda_handler(event, _):
-    print(f"Creating account {event.get('account_full_name')}")
     org_client = boto3.client("organizations")
     return create_account(event, ADF_ROLE_NAME, org_client)
diff --git a/src/lambda_codebase/account_processing/delete_default_vpc.py b/src/lambda_codebase/account_processing/delete_default_vpc.py
index b3a9aa78b..357b999f2 100644
--- a/src/lambda_codebase/account_processing/delete_default_vpc.py
+++ b/src/lambda_codebase/account_processing/delete_default_vpc.py
@@ -7,14 +7,17 @@
 import os
 from sts import STS
 from aws_xray_sdk.core import patch_all
+from logger import configure_logger
 
 patch_all()
+
+LOGGER = configure_logger(__name__)
 ADF_ROLE_NAME = os.getenv("ADF_ROLE_NAME")
 
 
 def lambda_handler(event, _):
     event = event.get("Payload")
-    print(f"Deleting Default vpc: {event.get('account_full_name')}")
+    LOGGER.info(f"Deleting Default vpc: {event.get('account_full_name')}")
     sts = STS()
 
     account_id = event.get("account")
diff --git a/src/lambda_codebase/account_processing/get_account_regions.py b/src/lambda_codebase/account_processing/get_account_regions.py
index d4b4b215f..8bfabc181 100644
--- a/src/lambda_codebase/account_processing/get_account_regions.py
+++ b/src/lambda_codebase/account_processing/get_account_regions.py
@@ -8,13 +8,16 @@
 import os
 from sts import STS
 from aws_xray_sdk.core import patch_all
+from logger import configure_logger
 
 patch_all()
+
+LOGGER = configure_logger(__name__)
 ADF_ROLE_NAME = os.getenv("ADF_ROLE_NAME")
 
 
 def lambda_handler(event, _):
-    print(f"Fetching Default regions {event.get('account_full_name')}")
+    LOGGER.info(f"Fetching Default regions {event.get('account_full_name')}")
     sts = STS()
     account_id = event.get("Id")
     role = sts.assume_cross_account_role(
@@ -37,6 +40,6 @@ def lambda_handler(event, _):
             ],
         )["Regions"]
     ]
-    print(default_regions)
+    LOGGER.debug(f"Default regions for {account_id}: {default_regions}")
     event["default_regions"] = default_regions
     return event
diff --git a/src/lambda_codebase/account_processing/process_account_files.py b/src/lambda_codebase/account_processing/process_account_files.py
index 9cf385fc4..f0beb8134 100644
--- a/src/lambda_codebase/account_processing/process_account_files.py
+++ b/src/lambda_codebase/account_processing/process_account_files.py
@@ -11,28 +11,43 @@
 import os
 from typing import Tuple
 
 import yaml
+from yaml.error import YAMLError
 import boto3
+from botocore.exceptions import ClientError
 
 from aws_xray_sdk.core import patch_all
 from organizations import Organizations
+from logger import configure_logger
 
 patch_all()
+
+LOGGER = configure_logger(__name__)
 ACCOUNT_MANAGEMENT_STATEMACHINE = os.getenv("ACCOUNT_MANAGEMENT_STATEMACHINE_ARN")
 
 
 def get_details_from_event(event: dict):
     s3_details = event.get("Records", [{}])[0].get("s3")
+    if not s3_details:
+        raise ValueError("No S3 Event details present in event trigger")
     bucket_name = s3_details.get("bucket", {}).get("name")
     object_key = s3_details.get("object", {}).get("key")
-    return bucket_name, object_key
+    return {"bucket_name":bucket_name, "object_key":object_key}
 
 
-def get_file_from_s3(s3_object: Tuple, s3_client: boto3.resource):
+def get_file_from_s3(s3_object: dict, s3_client: boto3.resource):
     bucket_name, object_key = s3_object
     s3_object = s3_client.Object(bucket_name, object_key)
-    s3_object.download_file(f"/tmp/{object_key}")
-    with open(f"/tmp/{object_key}", encoding="utf-8") as data_stream:
-        data = yaml.safe_load(data_stream)
+    try:
+        s3_object.download_file(f"/tmp/{object_key}")
+    except ClientError as e:
+        LOGGER.error(f"Failed to download {object_key} from {bucket_name}")
+        raise e
+    try:
+        with open(f"/tmp/{object_key}", encoding="utf-8") as data_stream:
+            data = yaml.safe_load(data_stream)
+    except YAMLError as p_e:
+        LOGGER.error("Failed to parse YAML file")
+        raise p_e
     return data
@@ -60,7 +75,9 @@ def process_account_list(all_accounts, accounts_in_file):
 
 
 def start_executions(sfn, processed_account_list):
+    LOGGER.info(f"Invoking Account Management State Machine ({ACCOUNT_MANAGEMENT_STATEMACHINE})")
     for account in processed_account_list:
+        LOGGER.debug(f"Payload: {account}")
         sfn.start_execution(
             stateMachineArn=ACCOUNT_MANAGEMENT_STATEMACHINE,
             input=f"{json.dumps(account)}",
diff --git a/src/lambda_codebase/account_processing/tests/test_account_file_processing.py b/src/lambda_codebase/account_processing/tests/test_account_file_processing.py
index ba1ca733f..d62cb877c 100644
--- a/src/lambda_codebase/account_processing/tests/test_account_file_processing.py
+++ b/src/lambda_codebase/account_processing/tests/test_account_file_processing.py
@@ -1,24 +1,33 @@
 """
 Tests the account file processing lambda
 """
+import unittest
+from ..process_account_files import process_account, process_account_list, get_details_from_event
 
-from ..process_account_files import process_account, process_account_list
 
+class SuccessTestCase(unittest.TestCase):
+    # pylint: disable=W0106
+    def test_process_account_when_account_exists(self):
+        test_account = {"alias": "MyCoolAlias", "account_full_name":"mytestaccountname"}
+        account_lookup = {"mytestaccountname":1234567890}
+        self.assertDictEqual(process_account(account_lookup, test_account), {"alias": "MyCoolAlias", "account_full_name":"mytestaccountname", "Id": 1234567890, "needs_created": False})
 
-# pylint: disable=W0106
-def test_process_account_when_account_exists():
-    test_account = {"alias": "MyCoolAlias", "account_full_name":"mytestaccountname"}
-    account_lookup = {"mytestaccountname":1234567890}
-    assert process_account(account_lookup, test_account) == {"alias": "MyCoolAlias", "account_full_name":"mytestaccountname", "Id": 1234567890, "needs_created": False}
+    def test_process_account_when_account_doesnt_exist(self):
+        test_account = {"alias": "MyCoolAlias", "account_full_name":"mytestaccountname"}
+        account_lookup = {"mydifferentaccount":1234567890}
+        self.assertDictEqual(process_account(account_lookup, test_account), {"alias": "MyCoolAlias", "account_full_name":"mytestaccountname", "needs_created": True})
 
-def test_process_account_when_account_doesnt_exist():
-    test_account = {"alias": "MyCoolAlias", "account_full_name":"mytestaccountname"}
-    account_lookup = {"mydifferentaccount":1234567890}
-    assert process_account(account_lookup, test_account) == {"alias": "MyCoolAlias", "account_full_name":"mytestaccountname", "needs_created": True}
+    def test_process_account_list(self):
+        all_accounts = [{"Name":"mytestaccountname", "Id":1234567890}]
+        accounts_in_file = [{"account_full_name": "mytestaccountname"}, {"account_full_name": "mynewaccountname"}]
+        self.assertListEqual(process_account_list(all_accounts, accounts_in_file), [
+            {"account_full_name":"mytestaccountname", "needs_created": False, "Id": 1234567890},
+            {"account_full_name":"mynewaccountname", "needs_created": True}
+        ])
 
-def test_process_account_list():
-    all_accounts = [{"Name":"mytestaccountname", "Id":1234567890}]
-    accounts_in_file = [{"account_full_name": "mytestaccountname"}, {"account_full_name": "mynewaccountname"}]
-    assert process_account_list(all_accounts, accounts_in_file) == [
-        {"account_full_name":"mytestaccountname", "needs_created": False, "Id": 1234567890},
-        {"account_full_name":"mynewaccountname", "needs_created": True}
-    ]
+class FailureTestCase(unittest.TestCase):
+    # pylint: disable=W0106
+    def test_event_parsing(self):
+        sample_event = {}
+        with self.assertRaises(ValueError) as _error:
+            get_details_from_event(sample_event)
+        self.assertEqual(str(_error.exception), "No S3 Event details present in event trigger")