From 66b2f9ee506c06cdbee0c2f90d3ee655c188c748 Mon Sep 17 00:00:00 2001
From: Buckingham
Date: Thu, 12 Dec 2024 10:56:18 +0000
Subject: [PATCH] Update_121224_1

---
 terraform/environments/ppud/lambda.tf        | 104 ++++++++++++
 .../lambda_scripts/disk_info_report_prod.py  | 138 ++++++++++++++++
 .../disk_read_write_report_prod.py           | 152 ++++++++++++++++++
 .../lambda_scripts/ppud_elb_report_prod.py   |   2 +-
 .../lambda_scripts/wam_elb_report_prod.py    |   2 +-
 5 files changed, 396 insertions(+), 2 deletions(-)
 create mode 100644 terraform/environments/ppud/lambda_scripts/disk_info_report_prod.py
 create mode 100644 terraform/environments/ppud/lambda_scripts/disk_read_write_report_prod.py

diff --git a/terraform/environments/ppud/lambda.tf b/terraform/environments/ppud/lambda.tf
index 0786b37bd28..ab1bbda3585 100644
--- a/terraform/environments/ppud/lambda.tf
+++ b/terraform/environments/ppud/lambda.tf
@@ -711,4 +711,108 @@ data "archive_file" "zip_the_send_memory_graph_code_prod" {
   type        = "zip"
   source_dir  = "${path.module}/lambda_scripts/"
   output_path = "${path.module}/lambda_scripts/send_memory_graph_prod.zip"
+}
+
+#################################################
+# Lambda Function to send Disk Info Report - PROD
+#################################################
+
+resource "aws_lambda_permission" "allow_lambda_to_query_cloudwatch_disk_info_report_prod" {
+  count         = local.is-production == true ? 1 : 0
+  statement_id  = "AllowAccesstoCloudWatch"
+  action        = "lambda:InvokeFunction"
+  function_name = aws_lambda_function.terraform_lambda_func_disk_info_report_prod[0].function_name
+  principal     = "cloudwatch.amazonaws.com"
+  source_arn    = "arn:aws:cloudwatch:eu-west-2:${local.environment_management.account_ids["ppud-production"]}:*"
+}
+
+resource "aws_lambda_function" "terraform_lambda_func_disk_info_report_prod" {
+  # checkov:skip=CKV_AWS_272: "PPUD Lambda code signing temporarily disabled for maintenance purposes"
+  count                          = local.is-production == true ? 1 : 0
+  filename                       = "${path.module}/lambda_scripts/disk_info_report_prod.zip"
+  function_name                  = "disk_info_report"
+  role                           = aws_iam_role.lambda_role_cloudwatch_get_metric_data_prod[0].arn
+  handler                        = "disk_info_report_prod.lambda_handler"
+  runtime                        = "python3.12"
+  timeout                        = 300
+  depends_on                     = [aws_iam_role_policy_attachment.attach_lambda_policy_cloudwatch_get_metric_data_to_lambda_role_cloudwatch_get_metric_data_prod]
+  reserved_concurrent_executions = 5
+  # code_signing_config_arn      = "arn:aws:lambda:eu-west-2:${local.environment_management.account_ids["ppud-production"]}:code-signing-config:csc-0bafee04a642a41c1"
+  dead_letter_config {
+    target_arn = aws_sqs_queue.lambda_queue_prod[0].arn
+  }
+  tracing_config {
+    mode = "Active"
+  }
+  layers = [
+    "arn:aws:lambda:eu-west-2:${data.aws_ssm_parameter.klayers_account_prod[0].value}:layer:Klayers-p312-numpy:8",
+    "arn:aws:lambda:eu-west-2:${data.aws_ssm_parameter.klayers_account_prod[0].value}:layer:Klayers-p312-pillow:1",
+    aws_lambda_layer_version.lambda_layer_matplotlib_prod[0].arn
+  ]
+  # VPC configuration
+  vpc_config {
+    subnet_ids         = [data.aws_subnet.private_subnets_b.id]
+    security_group_ids = [aws_security_group.PPUD-Mail-Server[0].id]
+  }
+}
+
+# Archive the zip file
+
+data "archive_file" "zip_the_disk_info_report_code_prod" {
+  count       = local.is-production == true ? 1 : 0
+  type        = "zip"
+  source_dir  = "${path.module}/lambda_scripts/"
+  output_path = "${path.module}/lambda_scripts/disk_info_report_prod.zip"
+}
+
+#######################################################
+# Lambda Function to send Disk Read Write Report - PROD
+#######################################################
+
+resource "aws_lambda_permission" "allow_lambda_to_query_cloudwatch_disk_read_write_report_prod" {
+  count         = local.is-production == true ? 1 : 0
+  statement_id  = "AllowAccesstoCloudWatch"
+  action        = "lambda:InvokeFunction"
+  function_name = aws_lambda_function.terraform_lambda_func_disk_read_write_report_prod[0].function_name
+  principal     = "cloudwatch.amazonaws.com"
+  source_arn    = "arn:aws:cloudwatch:eu-west-2:${local.environment_management.account_ids["ppud-production"]}:*"
+}
+
+resource "aws_lambda_function" "terraform_lambda_func_disk_read_write_report_prod" {
+  # checkov:skip=CKV_AWS_272: "PPUD Lambda code signing temporarily disabled for maintenance purposes"
+  count                          = local.is-production == true ? 1 : 0
+  filename                       = "${path.module}/lambda_scripts/disk_read_write_report_prod.zip"
+  function_name                  = "disk_read_write_report"
+  role                           = aws_iam_role.lambda_role_cloudwatch_get_metric_data_prod[0].arn
+  handler                        = "disk_read_write_report_prod.lambda_handler"
+  runtime                        = "python3.12"
+  timeout                        = 300
+  depends_on                     = [aws_iam_role_policy_attachment.attach_lambda_policy_cloudwatch_get_metric_data_to_lambda_role_cloudwatch_get_metric_data_prod]
+  reserved_concurrent_executions = 5
+  # code_signing_config_arn      = "arn:aws:lambda:eu-west-2:${local.environment_management.account_ids["ppud-production"]}:code-signing-config:csc-0bafee04a642a41c1"
+  dead_letter_config {
+    target_arn = aws_sqs_queue.lambda_queue_prod[0].arn
+  }
+  tracing_config {
+    mode = "Active"
+  }
+  layers = [
+    "arn:aws:lambda:eu-west-2:${data.aws_ssm_parameter.klayers_account_prod[0].value}:layer:Klayers-p312-numpy:8",
+    "arn:aws:lambda:eu-west-2:${data.aws_ssm_parameter.klayers_account_prod[0].value}:layer:Klayers-p312-pillow:1",
+    aws_lambda_layer_version.lambda_layer_matplotlib_prod[0].arn
+  ]
+  # VPC configuration
+  vpc_config {
+    subnet_ids         = [data.aws_subnet.private_subnets_b.id]
+    security_group_ids = [aws_security_group.PPUD-Mail-Server[0].id]
+  }
+}
+
+# Archive the zip file
+
+data "archive_file" "zip_the_disk_read_write_report_code_prod" {
+  count       = local.is-production == true ? 1 : 0
+  type        = "zip"
+  source_dir  = "${path.module}/lambda_scripts/"
+  output_path = "${path.module}/lambda_scripts/disk_read_write_report_prod.zip"
+}
\ No newline at end of file
diff --git a/terraform/environments/ppud/lambda_scripts/disk_info_report_prod.py b/terraform/environments/ppud/lambda_scripts/disk_info_report_prod.py
new file mode 100644
index 00000000000..3ac99af7c9e
--- /dev/null
+++ b/terraform/environments/ppud/lambda_scripts/disk_info_report_prod.py
@@ -0,0 +1,138 @@
+# Python script to retrieve disk information from an S3 log file, format it as an HTML table and email it to end users via the internal mail relay.
+# Nick Buckingham
+# 12 December 2024
+
+import boto3
+import os
+os.environ['MPLCONFIGDIR'] = "/tmp/graph"
+import re
+import io
+import base64
+from datetime import datetime, timedelta
+import smtplib
+from email.mime.multipart import MIMEMultipart
+from email.mime.text import MIMEText
+from email.mime.base import MIMEBase
+from email import encoders
+from botocore.exceptions import NoCredentialsError, PartialCredentialsError
+
+# Configuration
+CURRENT_DATE = datetime.now().strftime('%a %d %b %Y')
+SENDER = "donotreply@cjsm.secure-email.ppud.justice.gov.uk"
+RECIPIENTS = ['nick.buckingham@colt.net']
+SUBJECT = f'AWS PPUD Disk Information Report - {CURRENT_DATE}'
+AWS_REGION = 'eu-west-2'
+bucket_name = 'moj-lambda-layers-prod'
+file_key = 'all-disks.log'
+
+# SMTP Configuration
+SMTP_SERVER = "10.27.9.39"
+SMTP_PORT = 25
+
+# Initialize boto3 clients
+s3 = boto3.client('s3')
+cloudwatch = boto3.client("cloudwatch", region_name=AWS_REGION)
+ses = boto3.client("ses")
+
+def retrieve_file_from_s3(bucket, key):
+    # Download the disk report log that the EC2 instances upload to S3
+    response = s3.get_object(Bucket=bucket, Key=key)
+    content = response['Body'].read().decode('utf-8')
+    return content
+
+def parse_disk_info(content):
+    # Each disk appears in the log as a block of "Field : value" lines;
+    # capture the ten fields of each block as one tuple
+    disk_info_pattern = re.compile(
+        r"Hostname\s+:\s+(.+)\n"
+        r"Current Date\s+:\s+(.+)\n"
+        r"Drive Letter\s+:\s+(.+)\n"
+        r"Drive Label\s+:\s*(.*)\n"
+        r"File System Type\s+:\s+(.+)\n"
+        r"Total Capacity \(GB\)\s+:\s+(.+)\n"
+        r"Used Capacity \(GB\)\s+:\s+(.+)\n"
+        r"Total Free Space \(GB\)\s+:\s+(.+)\n"
+        r"% Free Space\s+:\s+(.+)\n"
+        r"Status\s+:\s+(.+)"
+    )
+    matches = disk_info_pattern.findall(content)
+    return matches
+
+def format_disk_info(disk_info):
+    # Sort disk_info to place the C drive first for each server
+    sorted_disk_info = sorted(disk_info, key=lambda x: (x[2] != 'C', x))
+
+    formatted_info = """
+    <table border="1" cellpadding="4" cellspacing="0">
+      <tr>
+        <th>Server</th>
+        <th>Drive</th>
+        <th>Drive Label</th>
+        <th>File System</th>
+        <th>Total Capacity (GB)</th>
+        <th>Used Capacity (GB)</th>
+        <th>Free Space (GB)</th>
+        <th>% Free Space</th>
+        <th>Status</th>
+      </tr>
+    """
+
+    current_hostname = None
+    for info in sorted_disk_info:
+        if current_hostname != info[0]:
+            if current_hostname is not None:
+                # Blank spacer row between servers
+                # formatted_info += f"""<tr><td colspan="9"></td></tr>"""
+                formatted_info += f"""<tr><td colspan="9">&nbsp;</td></tr>"""
+            current_hostname = info[0]
+
+        formatted_info += f"""
+      <tr>
+        <td>{info[0]}</td>
+        <td>{info[2]}</td>
+        <td>{info[3]}</td>
+        <td>{info[4]}</td>
+        <td>{info[5]}</td>
+        <td>{info[6]}</td>
+        <td>{info[7]}</td>
+        <td>{info[8]}%</td>
+        <td>{info[9]}</td>
+      </tr>
+    """
+    formatted_info += "</table>"
+    return formatted_info
+
+def send_email(subject, body_html):
+    msg = MIMEMultipart()
+    msg['From'] = SENDER
+    msg['To'] = ', '.join(RECIPIENTS)
+    msg['Subject'] = subject
+
+    msg.attach(MIMEText(body_html, 'html'))
+
+    # Send via the internal mail relay; TLS and authentication are not used
+    with smtplib.SMTP(SMTP_SERVER, SMTP_PORT) as server:
+        # server.starttls()
+        # server.login(smtp_username, smtp_password)
+        server.send_message(msg)
+
+def lambda_handler(event, context):
+    # Retrieve file from S3
+    content = retrieve_file_from_s3(bucket_name, file_key)
+
+    # Parse disk information
+    disk_info = parse_disk_info(content)
+
+    # Format disk information as an HTML table
+    formatted_info = format_disk_info(disk_info)
+
+    # Email formatted disk information
+    subject = 'AWS PPUD Disk Information Report'
+    body_html = f"""
+    <html>
+    <body>
+    <p>Hi Team</p>
+    <p>Please find below the PPUD disk information report.</p>
+    {formatted_info}
+    <p>This is an automated email</p>
+    </body>
+    </html>
+    """
+
+    send_email(subject, body_html)
+
+    return {
+        'statusCode': 200,
+        'body': 'Email sent successfully!'
+    }
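The parser above assumes that all-disks.log is a series of plain-text "Field : value" blocks, one block per disk. The exact layout of that file is not included in this patch, so the record in the sketch below is illustrative only; it simply exercises parse_disk_info and format_disk_info locally.

# Illustrative local test of parse_disk_info() / format_disk_info().
# The sample record is made up; the real all-disks.log is produced by the
# PPUD EC2 instances and is not part of this patch. Importing the module
# creates boto3 clients, so an AWS region must be configured.
from disk_info_report_prod import parse_disk_info, format_disk_info

SAMPLE_RECORD = (
    "Hostname                : EXAMPLE-SERVER-01\n"
    "Current Date            : Thu 12 Dec 2024\n"
    "Drive Letter            : C\n"
    "Drive Label             : OS\n"
    "File System Type        : NTFS\n"
    "Total Capacity (GB)     : 120\n"
    "Used Capacity (GB)      : 80\n"
    "Total Free Space (GB)   : 40\n"
    "% Free Space            : 33\n"
    "Status                  : OK"
)

if __name__ == "__main__":
    rows = parse_disk_info(SAMPLE_RECORD)
    assert len(rows) == 1 and rows[0][2] == "C"
    print(format_disk_info(rows))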
diff --git a/terraform/environments/ppud/lambda_scripts/disk_read_write_report_prod.py b/terraform/environments/ppud/lambda_scripts/disk_read_write_report_prod.py
new file mode 100644
index 00000000000..ef83c339992
--- /dev/null
+++ b/terraform/environments/ppud/lambda_scripts/disk_read_write_report_prod.py
@@ -0,0 +1,152 @@
+# Python script to retrieve cloudwatch metric data (disk read / write), graph it and email it to end users via the internal mail relay.
+# Nick Buckingham
+# 12 December 2024
+
+import boto3
+import os
+os.environ['MPLCONFIGDIR'] = "/tmp/graph"
+import matplotlib.pyplot as plt
+from datetime import datetime, timedelta
+import io
+import base64
+import smtplib
+from email.mime.multipart import MIMEMultipart
+from email.mime.text import MIMEText
+
+# Initialize boto3 clients
+cloudwatch = boto3.client('cloudwatch')
+# ses_client = boto3.client('ses', region_name=REGION)
+
+# Configuration
+CURRENT_DATE = datetime.now().strftime('%a %d %b %Y')
+INSTANCE_ID = "i-080498c4c9d25e6bd"
+SERVER = "021"
+# START_TIME = datetime(2024, 12, 4, 8, 0, 0)
+# END_TIME = datetime(2024, 12, 4, 14, 0, 0)
+END_TIME = datetime.utcnow()
+START_TIME = END_TIME - timedelta(hours=9)
+SENDER = "donotreply@cjsm.secure-email.ppud.justice.gov.uk"
+RECIPIENTS = ["nick.buckingham@colt.net"]
+SUBJECT = f'AWS EC2 Disk Read-Write Report - {SERVER} - {CURRENT_DATE}'
+REGION = "eu-west-2"
+IMAGE_ID = "ami-02f8251c8cdf2464f"
+INSTANCE_TYPE = "m5.2xlarge"
+
+# SMTP Configuration
+SMTP_SERVER = "10.27.9.39"
+SMTP_PORT = 25
+
+def get_metric_data(namespace, metric_name, dimensions):
+    # Query a single CloudWatch metric over the reporting window
+    response = cloudwatch.get_metric_data(
+        MetricDataQueries=[
+            {
+                'Id': 'm1',
+                'MetricStat': {
+                    'Metric': {
+                        'Namespace': namespace,
+                        'MetricName': metric_name,
+                        'Dimensions': dimensions
+                    },
+                    'Period': 300,
+                    'Stat': 'Maximum'
+                },
+                'ReturnData': True
+            },
+        ],
+        StartTime=START_TIME,
+        EndTime=END_TIME
+    )
+    return response['MetricDataResults'][0]
+
+def create_graph(read_bytes_data, write_bytes_data):
+    plt.figure(figsize=(20, 5))
+    plt.plot(read_bytes_data['Timestamps'], read_bytes_data['Values'], label='SQL Server Read Bytes', marker="o", linestyle="-", color="teal")
+    plt.plot(write_bytes_data['Timestamps'], write_bytes_data['Values'], label='SQL Server Write Bytes', marker="o", linestyle="-", color="royalblue")
+
+    plt.xlabel('Time')
+    plt.ylabel('Bytes (Read/Written)')
+    plt.title(f'EC2 Disk Read Write Report - {SERVER} - {CURRENT_DATE}')
+    plt.legend()
+    plt.grid(True)
+    plt.tight_layout()
+
+    # Save the graph to a temporary file
+    temp_file = "/tmp/disk_read_write.png"
+    plt.savefig(temp_file)
+    plt.close()
+
+    # Read the image and encode it to base64
+    with open(temp_file, "rb") as image_file:
+        encoded_string = base64.b64encode(image_file.read()).decode("utf-8")
+
+    # Cleanup temporary file
+    os.remove(temp_file)
+    return encoded_string
+
+def email_image_to_users(graph_base64):
+    """
+    Send an email with the graph embedded in the email body.
+    """
+    ses_client = boto3.client("ses", region_name=REGION)
+
+    # Email body with the embedded image
+    email_body = f"""
+    <html>
+    <body>
+    <p>Hi Team,</p>
+    <p>Please find below the disk read / write metrics for EC2 instance {SERVER} for today from 08:00 to 17:00.</p>
+    <img src="data:image/png;base64,{graph_base64}" alt="Disk Read Write Graph">
+    <p>This is an automated email.</p>
+    </body>
+    </html>
+    """
+
+    # Create the email message
+    msg = MIMEMultipart("alternative")
+    msg["From"] = SENDER
+    msg["To"] = ", ".join(RECIPIENTS)
+    msg["Subject"] = SUBJECT
+
+    # Attach the HTML body
+    msg.attach(MIMEText(email_body, "html"))
+
+    # Send the email with AWS SES
+    # try:
+    #     response = ses_client.send_raw_email(
+    #         Source=SENDER,
+    #         Destinations=RECIPIENTS,
+    #         RawMessage={"Data": msg.as_string()},
+    #     )
+    #     print("Email sent! Message ID:", response["MessageId"])
+    # except Exception as e:
+    #     print("Error sending email:", e)
+    #     raise
+
+    # Send the email with an EC2 Instance Mail Relay
+    try:
+        with smtplib.SMTP(SMTP_SERVER, SMTP_PORT) as server:
+            # server.starttls()
+            # server.login(SENDER, EMAIL_PASSWORD)
+            server.sendmail(SENDER, RECIPIENTS, msg.as_string())
+        print("Email sent successfully.")
+    except Exception as e:
+        print(f"Error sending email: {e}")
+
+def lambda_handler(event, context):
+
+    # Maximum read/write bytes for the SQL Server process, as published by the CloudWatch agent procstat plugin
+    read_bytes_data = get_metric_data('CWAgent', 'procstat read_bytes', [{'Name': 'InstanceId', 'Value': INSTANCE_ID}, {'Name': 'process_name', 'Value': 'sqlservr.exe'}, {'Name': 'exe', 'Value': 'sqlservr'}, {'Name': 'ImageId', 'Value': IMAGE_ID}, {'Name': 'InstanceType', 'Value': INSTANCE_TYPE}])
+    write_bytes_data = get_metric_data('CWAgent', 'procstat write_bytes', [{'Name': 'InstanceId', 'Value': INSTANCE_ID}, {'Name': 'process_name', 'Value': 'sqlservr.exe'}, {'Name': 'exe', 'Value': 'sqlservr'}, {'Name': 'ImageId', 'Value': IMAGE_ID}, {'Name': 'InstanceType', 'Value': INSTANCE_TYPE}])
+
+    # Create a graph and encode it as base64
+    print("Creating graph...")
+    graph_base64 = create_graph(read_bytes_data, write_bytes_data)
+
+    # Send email with the graph embedded
+    print("Sending email...")
+    # email_image_to_users(graph_image.getvalue())
+    email_image_to_users(graph_base64)
+
+    return {
+        'statusCode': 200,
+        'body': 'Graph successfully emailed!'
+    }
diff --git a/terraform/environments/ppud/lambda_scripts/ppud_elb_report_prod.py b/terraform/environments/ppud/lambda_scripts/ppud_elb_report_prod.py
index bcb74890111..0e4d8aae316 100644
--- a/terraform/environments/ppud/lambda_scripts/ppud_elb_report_prod.py
+++ b/terraform/environments/ppud/lambda_scripts/ppud_elb_report_prod.py
@@ -17,7 +17,7 @@
 # Configuration
 CURRENT_DATE = datetime.now().strftime('%a %d %b %Y')
-SENDER = 'noreply@internaltest.ppud.justice.gov.uk'
+SENDER = "donotreply@cjsm.secure-email.ppud.justice.gov.uk"
 RECIPIENTS = ['nick.buckingham@colt.net', 'kofi-nimoh@colt.net']
 SUBJECT = f'AWS PPUD Load Balancer Report - {CURRENT_DATE}'
 AWS_REGION = 'eu-west-2'
diff --git a/terraform/environments/ppud/lambda_scripts/wam_elb_report_prod.py b/terraform/environments/ppud/lambda_scripts/wam_elb_report_prod.py
index 09b4612db8c..15841981e25 100644
--- a/terraform/environments/ppud/lambda_scripts/wam_elb_report_prod.py
+++ b/terraform/environments/ppud/lambda_scripts/wam_elb_report_prod.py
@@ -17,7 +17,7 @@
 # Configuration
 CURRENT_DATE = datetime.now().strftime('%a %d %b %Y')
-SENDER = 'noreply@internaltest.ppud.justice.gov.uk'
+SENDER = "donotreply@cjsm.secure-email.ppud.justice.gov.uk"
 RECIPIENTS = ['nick.buckingham@colt.net']
 SUBJECT = f'AWS WAM Load Balancer Report - {CURRENT_DATE}'
 AWS_REGION = 'eu-west-2'
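The two new functions are granted invoke permission to CloudWatch and are intended to run on a schedule, so nothing in the patch exercises them directly. A minimal post-deployment smoke test is sketched below; it is not part of the patch and assumes credentials for the ppud-production account and the function names defined in lambda.tf above.

# Post-deploy smoke test (illustrative, not part of this patch): invoke each
# new report Lambda synchronously and print its response payload.
import json
import boto3

lambda_client = boto3.client("lambda", region_name="eu-west-2")

for function_name in ("disk_info_report", "disk_read_write_report"):
    response = lambda_client.invoke(
        FunctionName=function_name,
        InvocationType="RequestResponse",  # wait for the handler to return
        Payload=b"{}",                     # both handlers ignore the event payload
    )
    print(function_name, response["StatusCode"], json.loads(response["Payload"].read()))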