Skip to content

Commit

Permalink
Switching to logger rather than print
Browse files Browse the repository at this point in the history
  • Loading branch information
StewartW committed Nov 17, 2021
1 parent 166c6eb commit 7495fd3
Show file tree
Hide file tree
Showing 8 changed files with 77 additions and 37 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -8,14 +8,16 @@
import os
from sts import STS
from aws_xray_sdk.core import patch_all
from logger import configure_logger

patch_all()

LOGGER = configure_logger(__name__)
ADF_ROLE_NAME = os.getenv("ADF_ROLE_NAME")


def create_account_alias(account, iam_client):
print(
LOGGER.info(
f"Ensuring Account: {account.get('account_full_name')} has alias {account.get('alias')}"
)
try:
Expand All @@ -27,9 +29,6 @@ def create_account_alias(account, iam_client):

def lambda_handler(event, _):
if event.get("alias"):
print(
f"Ensuring Account: {event.get('account_full_name')} has alias {event.get('alias')}"
)
sts = STS()
account_id = event.get("Id")
role = sts.assume_cross_account_role(
Expand All @@ -38,7 +37,7 @@ def lambda_handler(event, _):
)
create_account_alias(event, role.client("iam"))
else:
print(
f"Ensuring Account: {event.get('account_full_name')} does not need an alias"
LOGGER.info(
f"Account: {event.get('account_full_name')} does not need an alias"
)
return event
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,15 @@
from organizations import Organizations
import boto3
from aws_xray_sdk.core import patch_all
from logger import configure_logger


patch_all()
LOGGER = configure_logger(__name__)


def lambda_handler(event, _):
print(
LOGGER.info(
f"Ensuring Account: {event.get('account_full_name')} is in OU {event.get('organizational_unit_path')}"
)
organizations = Organizations(boto3)
Expand Down
11 changes: 7 additions & 4 deletions src/lambda_codebase/account_processing/configure_account_tags.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,23 +12,26 @@

import boto3
from aws_xray_sdk.core import patch_all
from logger import configure_logger

patch_all()

LOGGER = configure_logger(__name__)


def create_account_tags(account_id, tags, org_session: Organizations):
    """Apply the given tags to an account via the Organizations session."""
    LOGGER.info(f"Ensuring Account: {account_id} has tags: {tags}")
    org_session.create_account_tags(account_id, tags)


def lambda_handler(event, _):
    """Ensure the account described in the event has its tags applied.

    If the event carries a "tags" entry, create those tags on the account;
    otherwise just log that nothing is needed. The event is returned
    unchanged so downstream state-machine steps receive it.
    """
    if event.get("tags"):
        organizations = Organizations(boto3)
        create_account_tags(event.get("Id"), event.get("tags"), organizations)
    else:
        # Use the module LOGGER (not print) for consistency with the rest
        # of the account-processing lambdas.
        LOGGER.info(
            f"Account: {event.get('account_full_name')} does not need tags configured"
        )
    return event
5 changes: 4 additions & 1 deletion src/lambda_codebase/account_processing/create_account.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,12 +8,16 @@
import os
from aws_xray_sdk.core import patch_all
import boto3
from logger import configure_logger

patch_all()

LOGGER = configure_logger(__name__)
ADF_ROLE_NAME = os.getenv("ADF_ROLE_NAME")


def create_account(account, adf_role_name, org_client):
LOGGER.info(f"Creating account {account.get('account_full_name')}")
allow_billing = "ALLOW" if account.get("allow_billing", False) else "DENY"
response = org_client.create_account(
Email=account.get("email"),
Expand All @@ -35,6 +39,5 @@ def create_account(account, adf_role_name, org_client):


def lambda_handler(event, _):
    """Entry point: create the AWS account described by the event payload.

    No logging here — create_account already logs the account being
    created, so the old print() duplicated that message.
    """
    org_client = boto3.client("organizations")
    return create_account(event, ADF_ROLE_NAME, org_client)
5 changes: 4 additions & 1 deletion src/lambda_codebase/account_processing/delete_default_vpc.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,14 +7,17 @@
import os
from sts import STS
from aws_xray_sdk.core import patch_all
from logger import configure_logger

patch_all()

LOGGER = configure_logger(__name__)
ADF_ROLE_NAME = os.getenv("ADF_ROLE_NAME")


def lambda_handler(event, _):
event = event.get("Payload")
print(f"Deleting Default vpc: {event.get('account_full_name')}")
LOGGER.info(f"Deleting Default vpc: {event.get('account_full_name')}")
sts = STS()
account_id = event.get("account")

Expand Down
7 changes: 5 additions & 2 deletions src/lambda_codebase/account_processing/get_account_regions.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,13 +8,16 @@
import os
from sts import STS
from aws_xray_sdk.core import patch_all
from logger import configure_logger

patch_all()

LOGGER = configure_logger(__name__)
ADF_ROLE_NAME = os.getenv("ADF_ROLE_NAME")


def lambda_handler(event, _):
print(f"Fetching Default regions {event.get('account_full_name')}")
LOGGER.info(f"Fetching Default regions {event.get('account_full_name')}")
sts = STS()
account_id = event.get("Id")
role = sts.assume_cross_account_role(
Expand All @@ -37,6 +40,6 @@ def lambda_handler(event, _):
],
)["Regions"]
]
print(default_regions)
LOGGER.debug(f"Default regions for {account_id}: {default_regions}")
event["default_regions"] = default_regions
return event
27 changes: 22 additions & 5 deletions src/lambda_codebase/account_processing/process_account_files.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,28 +11,43 @@
import os
from typing import Tuple
import yaml
from yaml.error import YAMLError

import boto3
from botocore.exceptions import ClientError
from aws_xray_sdk.core import patch_all
from organizations import Organizations
from logger import configure_logger

patch_all()

LOGGER = configure_logger(__name__)
ACCOUNT_MANAGEMENT_STATEMACHINE = os.getenv("ACCOUNT_MANAGEMENT_STATEMACHINE_ARN")


def get_details_from_event(event: dict):
    """Extract the S3 bucket name and object key from an S3 event trigger.

    :param event: Lambda event payload; expects the standard S3 notification
        shape (a "Records" list whose first entry holds an "s3" mapping).
    :returns: ``(bucket_name, object_key)`` tuple.
    :raises ValueError: when the event carries no S3 details.
    """
    s3_details = event.get("Records", [{}])[0].get("s3")
    if not s3_details:
        raise ValueError("No S3 Event details present in event trigger")
    bucket_name = s3_details.get("bucket", {}).get("name")
    object_key = s3_details.get("object", {}).get("key")
    # Return a tuple: get_file_from_s3 unpacks this as (bucket_name, object_key).
    # (A second, unreachable dict-return was left behind by a merge; removed.)
    return bucket_name, object_key


def get_file_from_s3(s3_object: Tuple, s3_client: boto3.resource):
    """Download an account file from S3 and parse it as YAML.

    :param s3_object: ``(bucket_name, object_key)`` tuple, as produced by
        get_details_from_event.
    :param s3_client: boto3 S3 service resource.
    :returns: the parsed YAML content.
    :raises ClientError: when the S3 download fails.
    :raises YAMLError: when the downloaded file is not valid YAML.
    """
    # Annotated as Tuple (not dict) to match the unpacking below.
    bucket_name, object_key = s3_object
    s3_resource_object = s3_client.Object(bucket_name, object_key)
    local_path = f"/tmp/{object_key}"
    try:
        s3_resource_object.download_file(local_path)
    except ClientError:
        LOGGER.error(f"Failed to download {object_key} from {bucket_name}")
        raise
    try:
        with open(local_path, encoding="utf-8") as data_stream:
            data = yaml.safe_load(data_stream)
    except YAMLError:
        LOGGER.error("Failed to parse YAML file")
        raise
    return data

Expand Down Expand Up @@ -60,7 +75,9 @@ def process_account_list(all_accounts, accounts_in_file):


def start_executions(sfn, processed_account_list):
LOGGER.info(f"Invoking Account Management State Machine ({ACCOUNT_MANAGEMENT_STATEMACHINE})")
for account in processed_account_list:
LOGGER.debug(f"Payload: {account}")
sfn.start_execution(
stateMachineArn=ACCOUNT_MANAGEMENT_STATEMACHINE,
input=f"{json.dumps(account)}",
Expand Down
Original file line number Diff line number Diff line change
@@ -1,24 +1,33 @@
"""
Tests the account file processing lambda
"""
import unittest
from ..process_account_files import process_account, process_account_list, get_details_from_event

from ..process_account_files import process_account, process_account_list
class SuccessTestCase(unittest.TestCase):
    """Happy-path tests for the account file processing helpers."""
    # pylint: disable=W0106

    def test_process_account_when_account_exists(self):
        # An account present in the lookup gets its Id and needs_created=False.
        test_account = {"alias": "MyCoolAlias", "account_full_name": "mytestaccountname"}
        account_lookup = {"mytestaccountname": 1234567890}
        self.assertDictEqual(
            process_account(account_lookup, test_account),
            {
                "alias": "MyCoolAlias",
                "account_full_name": "mytestaccountname",
                "Id": 1234567890,
                "needs_created": False,
            },
        )

    def test_process_account_when_account_doesnt_exist(self):
        # An unknown account is flagged for creation and gets no Id.
        test_account = {"alias": "MyCoolAlias", "account_full_name": "mytestaccountname"}
        account_lookup = {"mydifferentaccount": 1234567890}
        self.assertDictEqual(
            process_account(account_lookup, test_account),
            {
                "alias": "MyCoolAlias",
                "account_full_name": "mytestaccountname",
                "needs_created": True,
            },
        )

    def test_process_account_list(self):
        # Mixed list: existing accounts keep their Id, new ones are flagged.
        all_accounts = [{"Name": "mytestaccountname", "Id": 1234567890}]
        accounts_in_file = [
            {"account_full_name": "mytestaccountname"},
            {"account_full_name": "mynewaccountname"},
        ]
        self.assertListEqual(
            process_account_list(all_accounts, accounts_in_file),
            [
                {"account_full_name": "mytestaccountname", "needs_created": False, "Id": 1234567890},
                {"account_full_name": "mynewaccountname", "needs_created": True},
            ],
        )
class FailureTestCase(unittest.TestCase):
    """Failure-mode tests for the account file processing lambda."""
    # pylint: disable=W0106

    def test_event_parsing(self):
        # An event with no S3 record details must be rejected.
        empty_event = {}
        with self.assertRaises(ValueError) as raised:
            get_details_from_event(empty_event)
        self.assertEqual(
            str(raised.exception),
            "No S3 Event details present in event trigger",
        )

0 comments on commit 7495fd3

Please sign in to comment.