
Commit

Merge pull request #8340 from ministryofjustice/TM-586
TM-586; TM-614 Adjusted schedule to 4X a day and merging alert log alarm
vc13837 authored Oct 18, 2024
2 parents f22bdd5 + e02fd02 commit 7fb642e
Showing 7 changed files with 23 additions and 332 deletions.
12 changes: 6 additions & 6 deletions terraform/environments/apex/backups.tf
@@ -57,7 +57,7 @@ resource "aws_backup_vault_policy" "apex" {
}

############################################################################
-## This following is required for setting up hourly backup for production
+## This following is required for setting up backup for production
############################################################################


@@ -72,14 +72,14 @@ resource "aws_backup_vault" "prod_apex" {

resource "aws_backup_plan" "prod_apex" {
count = local.environment == "production" ? 1 : 0
-name = "${local.application_name}-backup-hourly-retain-35-days"
+name = "${local.application_name}-backup-retain-35-days"

rule {
-rule_name = "${local.application_name}-backup-hourly-retain-35-days"
+rule_name = "${local.application_name}-backup-retain-35-days"
target_vault_name = aws_backup_vault.prod_apex[0].name

-# Backup hourly
-schedule = "cron(0 * * * ? *)"
+# Backup every 6 hours on the hour
+schedule = "cron(0 0,6,12,18 * * ? *)"

lifecycle {
delete_after = 35
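
A note for context, not part of the commit: AWS Backup rule schedules use the six-field AWS cron syntax (minutes, hours, day-of-month, month, day-of-week, year), evaluated in UTC by default, so cron(0 0,6,12,18 * * ? *) fires at 00:00, 06:00, 12:00 and 18:00 each day, i.e. the "4X a day" of the commit title, replacing the previous hourly run. A minimal sketch of such a plan, with illustrative names:

# Sketch only: resource and vault names below are illustrative, not the repository's.
resource "aws_backup_vault" "example" {
  name = "example-backup-vault"
}

resource "aws_backup_plan" "example" {
  name = "example-backup-retain-35-days"

  rule {
    rule_name         = "example-backup-retain-35-days"
    target_vault_name = aws_backup_vault.example.name

    # Six-field AWS cron (minute hour day-of-month month day-of-week year), UTC:
    # runs at 00:00, 06:00, 12:00 and 18:00 every day.
    schedule = "cron(0 0,6,12,18 * * ? *)"

    lifecycle {
      delete_after = 35 # days each recovery point is retained
    }
  }
}
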
@@ -108,7 +108,7 @@ resource "aws_backup_selection" "prod_apex" {

condition {
string_equals {
-key = "aws:ResourceTag/snapshot-with-hourly-35-day-retention"
+key = "aws:ResourceTag/snapshot-35-day-retention"
value = "yes"
}
# TODO tags required to be confirmed
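
How the renamed tag ties resources to the plan above, shown as a sketch under assumed names (the volume, role ARN and selection name are placeholders; the tag map mirrors locals.tf): any resource tagged snapshot-35-day-retention = "yes" is matched by the selection's string_equals condition.

# Sketch only: illustrative resources, not the repository's.
locals {
  example_backup_schedule_tags = { "snapshot-35-day-retention" = "yes" }
}

# A resource carrying the tag...
resource "aws_ebs_volume" "example" {
  availability_zone = "eu-west-2a"
  size              = 100
  tags              = merge({ "Name" = "example-volume" }, local.example_backup_schedule_tags)
}

# ...is picked up by a selection that matches on that tag.
resource "aws_backup_selection" "example" {
  name         = "example-selection"
  plan_id      = aws_backup_plan.example.id # the plan sketched above
  iam_role_arn = "arn:aws:iam::123456789012:role/example-backup-role" # placeholder ARN
  resources    = ["*"]

  condition {
    string_equals {
      key   = "aws:ResourceTag/snapshot-35-day-retention"
      value = "yes"
    }
  }
}
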
2 changes: 1 addition & 1 deletion terraform/environments/apex/cloudwatch.tf
@@ -51,7 +51,7 @@ resource "aws_cloudwatch_metric_alarm" "database_cpu" {
resource "aws_cloudwatch_metric_alarm" "database_oracle_alerts" {

alarm_name = "${local.application_name}-${local.environment}-oracle-alerts-log-errors"
-alarm_description = "Errors Detected in Oracle Alerts Log."
+alarm_description = "Errors Detected in Oracle Alerts Log, please check the log group ${aws_cloudwatch_log_group.database.name}"
comparison_operator = "GreaterThanOrEqualToThreshold"
evaluation_periods = "1"
metric_name = aws_cloudwatch_log_metric_filter.database.name
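
For readers unfamiliar with the pattern behind this alarm, a hedged sketch follows (names, the filter pattern and thresholds are assumptions, not the repository's real values): a log metric filter turns matching alert-log lines into a custom metric, and the alarm fires when at least one match is seen in an evaluation period; the updated description simply points responders at the source log group.

# Sketch only: all names, the "ORA-" pattern and the thresholds are illustrative.
resource "aws_cloudwatch_log_group" "database" {
  name = "example-database-alert-log"
}

resource "aws_cloudwatch_log_metric_filter" "database" {
  name           = "example-oracle-alert-errors"
  log_group_name = aws_cloudwatch_log_group.database.name
  pattern        = "ORA-" # assumed error pattern

  metric_transformation {
    name      = "example-oracle-alert-errors" # kept equal to the filter name so the alarm can reuse it
    namespace = "Example/Oracle"
    value     = "1"
  }
}

resource "aws_cloudwatch_metric_alarm" "database_oracle_alerts" {
  alarm_name          = "example-oracle-alerts-log-errors"
  alarm_description   = "Errors detected in the Oracle alert log, please check the log group ${aws_cloudwatch_log_group.database.name}"
  comparison_operator = "GreaterThanOrEqualToThreshold"
  evaluation_periods  = 1
  threshold           = 1
  period              = 300
  statistic           = "Sum"
  namespace           = "Example/Oracle"
  metric_name         = aws_cloudwatch_log_metric_filter.database.name
  treat_missing_data  = "notBreaching"
}
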
5 changes: 5 additions & 0 deletions terraform/environments/apex/cloudwatch_agent_config.json
@@ -90,6 +90,11 @@
"file_path": "/home/oracle/logs/pmon_status_alert.log",
"log_group_name": "APEX-EC2-database-pmon-status",
"log_stream_name": "pmon-status-{instance_id}"
},
+{
+"file_path": "/home/oracle/logs/alert_log_check.txt",
+"log_group_name": "APEX-EC2-database-alert",
+"log_stream_name": "alertlog-{instance_id}"
+}
]
}
19 changes: 7 additions & 12 deletions terraform/environments/apex/ec2.tf
@@ -25,15 +25,15 @@ resource "aws_instance" "apex_db_instance" {
volume_type = "gp2"
tags = merge(
local.tags,
{ "Name" = "${local.application_name}db-ec2-root" },
local.backup_schedule_tags
{ "Name" = "${local.application_name}db-ec2-root" }
)
}

tags = merge(
local.tags,
{ "Name" = local.database_ec2_name },
{ "instance-scheduling" = "skip-scheduling" }
{ "instance-scheduling" = "skip-scheduling" },
local.backup_schedule_tags
)
}
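
One reading of this hunk (an inference, not stated in the commit): the backup tag moves off the root block device here, and off each attached EBS volume in the hunks below, onto the instance itself; tagging only the instance is sufficient for an EC2 backup, since the resulting image includes snapshots of the attached volumes. A minimal sketch of the instance-level tagging, with placeholder values:

# Sketch only: AMI ID, instance type and names are placeholders.
resource "aws_instance" "example_db" {
  ami           = "ami-00000000000000000"
  instance_type = "r6i.xlarge"

  root_block_device {
    volume_type = "gp2"
    tags        = { "Name" = "example-db-ec2-root" } # backup tag no longer needed here
  }

  tags = merge(
    { "Name" = "example-db" },
    { "instance-scheduling" = "skip-scheduling" },
    local.backup_schedule_tags # defined in locals.tf; { "snapshot-35-day-retention" = "yes" } in production
  )
}
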

@@ -176,8 +176,7 @@ resource "aws_ebs_volume" "u01-orahome" {
}
tags = merge(
local.tags,
{ "Name" = "${local.application_name}db-ec2-u01-orahome" },
local.backup_schedule_tags
{ "Name" = "${local.application_name}db-ec2-u01-orahome" }
)
}
resource "aws_volume_attachment" "u01-orahome" {
@@ -198,8 +197,7 @@ resource "aws_ebs_volume" "u02-oradata" {
}
tags = merge(
local.tags,
{ "Name" = "${local.application_name}db-ec2-u02-oradata" },
local.backup_schedule_tags
{ "Name" = "${local.application_name}db-ec2-u02-oradata" }
)
}

@@ -223,8 +221,7 @@ resource "aws_ebs_volume" "u03-redo" {
}
tags = merge(
local.tags,
{ "Name" = "${local.application_name}db-ec2-u03-redo" },
local.backup_schedule_tags
{ "Name" = "${local.application_name}db-ec2-u03-redo" }
)
}
resource "aws_volume_attachment" "u03-redo" {
@@ -245,8 +242,7 @@ resource "aws_ebs_volume" "u04-arch" {
}
tags = merge(
local.tags,
{ "Name" = "${local.application_name}db-ec2-u04-arch" },
local.backup_schedule_tags
{ "Name" = "${local.application_name}db-ec2-u04-arch" }
)
}
resource "aws_volume_attachment" "u04-arch" {
@@ -318,4 +314,3 @@ resource "aws_cloudwatch_log_metric_filter" "pmon_status" {




3 changes: 1 addition & 2 deletions terraform/environments/apex/efs.tf
@@ -46,8 +46,7 @@ resource "aws_efs_file_system" "efs" {

tags = merge(
local.tags,
{ "Name" = "mp-${local.application_name}-efs" },
local.environment != "production" ? { "snapshot-with-daily-35-day-retention" = "yes" } : { "snapshot-with-hourly-35-day-retention" = "yes" }
{ "Name" = "mp-${local.application_name}-efs" }
)

lifecycle_policy {
2 changes: 1 addition & 1 deletion terraform/environments/apex/locals.tf
@@ -78,7 +78,7 @@ locals {
app_db_password_name = "APP_APEX_DBPASSWORD_TAD"
db_hostname = "db.${local.application_name}"

-backup_schedule_tags = local.environment == "production" ? { "snapshot-with-hourly-35-day-retention" = "yes" } : { "snapshot-with-daily-7-day-retention" = "yes" }
+backup_schedule_tags = local.environment == "production" ? { "snapshot-35-day-retention" = "yes" } : null
database-instance-userdata = <<EOF
#!/bin/bash
cd /tmp
(diff for one remaining changed file not shown)

