
Merge pull request #3660 from ministryofjustice/date_2023_10_16
GitHub Actions Code Formatter workflow
ASTRobinson authored Oct 17, 2023
2 parents bf988f3 + 1b1304a commit ca32ce7
Showing 10 changed files with 137 additions and 129 deletions.
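
Per the commit message, these changes were produced automatically by the repository's code-formatter workflow rather than by hand: every hunk below is whitespace, argument alignment, or line wrapping, with no functional change. As a minimal, hypothetical sketch only (the actual workflow file is not part of this diff, and the real workflow evidently reformats JSON as well as Terraform), a GitHub Actions job of this kind might look roughly like:

name: code-formatter        # hypothetical name; the real workflow is not shown here
on:
  workflow_dispatch: {}     # assumed trigger; the real schedule/trigger is unknown

jobs:
  format:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: hashicorp/setup-terraform@v3
      # Rewrite every *.tf file under terraform/ to Terraform's canonical style
      - run: terraform fmt -recursive terraform/
      # Commit whatever the formatter changed, if anything
      - run: |
          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"
          git add -A
          git diff --cached --quiet || git commit -m "GitHub Actions Code Formatter workflow"
          git push

Running terraform fmt -recursive locally against terraform/environments should reproduce the Terraform whitespace changes shown below; note that several -/+ pairs in this extraction look identical because the indentation and alignment that changed was lost when the page was captured.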
32 changes: 18 additions & 14 deletions terraform/environments/apex/cloudwatch_agent_config.json
@@ -25,7 +25,13 @@
"metrics_collection_interval": 60,
"drop_device": true,
"resources": ["*"],
"ignore_file_system_types": ["tmpfs", "devtmpfs", "sysfs", "fuse.s3fs", "nfs4"]
"ignore_file_system_types": [
"tmpfs",
"devtmpfs",
"sysfs",
"fuse.s3fs",
"nfs4"
]
},
"diskio": {
"measurement": [
@@ -67,19 +73,17 @@
}
},
"logs": {
"logs_collected": {
"files": {
"collect_list": [
{
"file_path": "/var/log/messages",
"log_group_name": "APEX-EC2",
"log_stream_name": "{instance_id}",
"retention_in_days": 90


}
]
}
"logs_collected": {
"files": {
"collect_list": [
{
"file_path": "/var/log/messages",
"log_group_name": "APEX-EC2",
"log_stream_name": "{instance_id}",
"retention_in_days": 90
}
]
}
}
}
}
2 changes: 1 addition & 1 deletion terraform/environments/apex/ec2.tf
@@ -33,7 +33,7 @@ resource "aws_instance" "apex_db_instance" {
subnet_id = data.aws_subnet.private_subnets_a.id
iam_instance_profile = aws_iam_instance_profile.ec2_instance_profile.id
user_data_base64 = base64encode(local.instance-userdata)
-
+
root_block_device {
delete_on_termination = false
@@ -52,14 +52,15 @@
},
{
"properties": {
"type": {
"type": "string",
"pattern": "^float$|^double$|^decimal\\(\\d{1,2},\\s?\\d{1,2}\\)$",
"description": "The data type of the Column. See https://docs.aws.amazon.com/athena/latest/ug/data-types.html",
"example": "float"
"type": {
"type": "string",
"pattern": "^float$|^double$|^decimal\\(\\d{1,2},\\s?\\d{1,2}\\)$",
"description": "The data type of the Column. See https://docs.aws.amazon.com/athena/latest/ug/data-types.html",
"example": "float"
}
}
}
}]
]
},
"string_types": {
"allOf": [
@@ -68,14 +69,15 @@
},
{
"properties": {
"type": {
"type": "string",
"pattern": "^char\\(\\d{1,3}\\)$|^varchar\\(\\d{0,5}\\)$|^varchar$|^string$",
"description": "The data type of the Column. See https://docs.aws.amazon.com/athena/latest/ug/data-types.html",
"example": "string"
"type": {
"type": "string",
"pattern": "^char\\(\\d{1,3}\\)$|^varchar\\(\\d{0,5}\\)$|^varchar$|^string$",
"description": "The data type of the Column. See https://docs.aws.amazon.com/athena/latest/ug/data-types.html",
"example": "string"
}
}
}
}]
]
},
"boolean_types": {
"allOf": [
@@ -84,14 +86,15 @@
},
{
"properties": {
"type": {
"type": "string",
"pattern": "^boolean$",
"description": "The data type of the Column. See https://docs.aws.amazon.com/athena/latest/ug/data-types.html",
"example": "boolean"
"type": {
"type": "string",
"pattern": "^boolean$",
"description": "The data type of the Column. See https://docs.aws.amazon.com/athena/latest/ug/data-types.html",
"example": "boolean"
}
}
}
}]
]
},
"date_types": {
"allOf": [
@@ -100,14 +103,15 @@
},
{
"properties": {
"type": {
"type": "string",
"pattern": "^date$|^timestamp$",
"description": "The data type of the Column. See https://docs.aws.amazon.com/athena/latest/ug/data-types.html",
"example": "date"
"type": {
"type": "string",
"pattern": "^date$|^timestamp$",
"description": "The data type of the Column. See https://docs.aws.amazon.com/athena/latest/ug/data-types.html",
"example": "date"
}
}
}
}]
]
},
"struct_types": {
"allOf": [
@@ -116,14 +120,15 @@
},
{
"properties": {
"type": {
"type": "string",
"pattern": "^map_<.+>$|^struct<.+>$",
"description": "The data type of the Column. See https://docs.aws.amazon.com/athena/latest/ug/data-types.html",
"example": "struct<id:string,report_date:date,modified_date:timestamp>"
"type": {
"type": "string",
"pattern": "^map_<.+>$|^struct<.+>$",
"description": "The data type of the Column. See https://docs.aws.amazon.com/athena/latest/ug/data-types.html",
"example": "struct<id:string,report_date:date,modified_date:timestamp>"
}
}
}
}]
]
}
},
"properties": {
@@ -161,4 +166,3 @@
},
"additionalProperties": false
}
-
@@ -86,8 +86,8 @@ data "aws_iam_policy_document" "ec2_access_for_ansible" {

data "aws_iam_policy_document" "allow_access_to_ssm_parameter_store" {
statement {
- sid = "AllowAccessToSsmParameterStore"
- effect = "Allow"
+ sid = "AllowAccessToSsmParameterStore"
+ effect = "Allow"
actions = [
"ssm:PutParameter"
]
2 changes: 1 addition & 1 deletion terraform/environments/digital-prison-reporting/locals.tf
@@ -186,7 +186,7 @@ locals {
lambda_transfercomp_layer_name = "${local.project}-redhift-jdbc-dependency-layer"

# Enable CW alarms
- enable_cw_alarm = local.application_data.accounts[local.environment].setup_cw_alarms
+ enable_cw_alarm = local.application_data.accounts[local.environment].setup_cw_alarms

# Sonatype Secrets
setup_sonatype_secrets = local.application_data.accounts[local.environment].setup_sonatype_secrets
78 changes: 39 additions & 39 deletions terraform/environments/digital-prison-reporting/main.tf
@@ -18,21 +18,21 @@ module "glue_reporting_hub_job" {
create_security_configuration = local.create_sec_conf
temp_dir = "s3://${module.s3_glue_job_bucket.bucket_id}/tmp/${local.project}-reporting-hub-${local.env}/"
# Using s3a for the checkpoint path to align with Hadoop 3 support
- checkpoint_dir = "s3a://${module.s3_glue_job_bucket.bucket_id}/checkpoint/${local.project}-reporting-hub-${local.env}/"
- spark_event_logs = "s3://${module.s3_glue_job_bucket.bucket_id}/spark-logs/${local.project}-reporting-hub-${local.env}/"
+ checkpoint_dir = "s3a://${module.s3_glue_job_bucket.bucket_id}/checkpoint/${local.project}-reporting-hub-${local.env}/"
+ spark_event_logs = "s3://${module.s3_glue_job_bucket.bucket_id}/spark-logs/${local.project}-reporting-hub-${local.env}/"
# Placeholder Script Location
- script_location = local.glue_placeholder_script_location
- enable_continuous_log_filter = false
- project_id = local.project
- aws_kms_key = local.s3_kms_arn
- additional_policies = module.kinesis_stream_ingestor.kinesis_stream_iam_policy_admin_arn
- execution_class = "STANDARD"
- worker_type = local.reporting_hub_worker_type
- number_of_workers = local.reporting_hub_num_workers
- max_concurrent = 1
- region = local.account_region
- account = local.account_id
- log_group_retention_in_days = 1
+ script_location = local.glue_placeholder_script_location
+ enable_continuous_log_filter = false
+ project_id = local.project
+ aws_kms_key = local.s3_kms_arn
+ additional_policies = module.kinesis_stream_ingestor.kinesis_stream_iam_policy_admin_arn
+ execution_class = "STANDARD"
+ worker_type = local.reporting_hub_worker_type
+ number_of_workers = local.reporting_hub_num_workers
+ max_concurrent = 1
+ region = local.account_region
+ account = local.account_id
+ log_group_retention_in_days = 1

tags = merge(
local.all_tags,
@@ -88,19 +88,19 @@ module "glue_domain_refresh_job" {
checkpoint_dir = "s3://${module.s3_glue_job_bucket.bucket_id}/checkpoint/${local.project}-domain-refresh-${local.env}/"
spark_event_logs = "s3://${module.s3_glue_job_bucket.bucket_id}/spark-logs/${local.project}-domain-refresh-${local.env}/"
# Placeholder Script Location
- script_location = local.glue_placeholder_script_location
- enable_continuous_log_filter = false
- project_id = local.project
- aws_kms_key = local.s3_kms_arn
- additional_policies = module.kinesis_stream_ingestor.kinesis_stream_iam_policy_admin_arn
+ script_location = local.glue_placeholder_script_location
+ enable_continuous_log_filter = false
+ project_id = local.project
+ aws_kms_key = local.s3_kms_arn
+ additional_policies = module.kinesis_stream_ingestor.kinesis_stream_iam_policy_admin_arn
# timeout = 1440
- execution_class = "FLEX"
- worker_type = local.refresh_job_worker_type
- number_of_workers = local.refresh_job_num_workers
- max_concurrent = 64
- region = local.account_region
- account = local.account_id
- log_group_retention_in_days = 1
+ execution_class = "FLEX"
+ worker_type = local.refresh_job_worker_type
+ number_of_workers = local.refresh_job_num_workers
+ max_concurrent = 64
+ region = local.account_region
+ account = local.account_id
+ log_group_retention_in_days = 1

tags = merge(
local.all_tags,
@@ -174,7 +174,7 @@ module "glue_registry_avro" {
source = "./modules/glue_registry"
enable_glue_registry = true
name = "${local.project}-glue-registry-avro-${local.env}"
- tags = merge(
+ tags = merge(
local.all_tags,
{
Name = "${local.project}-glue-registry-avro-${local.env}"
@@ -200,7 +200,7 @@ module "glue_raw_table" {
# AWS Glue catalog table
glue_catalog_table_description = "Glue Table for raw data, managed by Terraform."
glue_catalog_table_table_type = "EXTERNAL_TABLE"
- glue_catalog_table_parameters = {
+ glue_catalog_table_parameters = {
EXTERNAL = "TRUE"
"parquet.compression" = "SNAPPY"
"classification" = "parquet"
@@ -262,7 +262,7 @@ module "glue_reconciliation_table" {
# AWS Glue catalog table
glue_catalog_table_description = "Glue Table for reconciliation data, managed by Terraform."
glue_catalog_table_table_type = "EXTERNAL_TABLE"
- glue_catalog_table_parameters = {
+ glue_catalog_table_parameters = {
EXTERNAL = "TRUE"
"parquet.compression" = "SNAPPY"
"classification" = "parquet"
@@ -600,9 +600,9 @@ module "ec2_kinesis_agent" {
ebs_encrypted = true
ebs_delete_on_termination = false
# s3_policy_arn = aws_iam_policy.read_s3_read_access_policy.arn # TBC
- region = local.account_region
- account = local.account_id
- env = local.env
+ region = local.account_region
+ account = local.account_id
+ env = local.env


tags = merge(
@@ -632,19 +632,19 @@ module "datamart" {
create_subnet_group = true
kms_key_arn = aws_kms_key.redshift-kms-key.arn
enhanced_vpc_routing = false
- subnet_ids = [
+ subnet_ids = [
data.aws_subnet.private_subnets_a.id, data.aws_subnet.private_subnets_b.id, data.aws_subnet.private_subnets_c.id
]
- vpc = data.aws_vpc.shared.id
- cidr = [data.aws_vpc.shared.cidr_block, local.cloud_platform_cidr]
- iam_role_arns = [aws_iam_role.redshift-role.arn, aws_iam_role.redshift-spectrum-role.arn]
+ vpc = data.aws_vpc.shared.id
+ cidr = [data.aws_vpc.shared.cidr_block, local.cloud_platform_cidr]
+ iam_role_arns = [aws_iam_role.redshift-role.arn, aws_iam_role.redshift-spectrum-role.arn]

# Endpoint access - only available when using the ra3.x node types
create_endpoint_access = false

# Scheduled actions
create_scheduled_action_iam_role = true
- scheduled_actions = {
+ scheduled_actions = {
pause = {
name = "${local.redshift_cluster_name}-pause"
description = "Pause cluster every night"
@@ -698,7 +698,7 @@ module "dms_nomis_ingestor" {
migration_type = "full-load-and-cdc"
replication_instance_version = "3.4.7" # Upgrade
replication_instance_class = "dms.t3.medium"
- subnet_ids = [
+ subnet_ids = [
data.aws_subnet.data_subnets_a.id, data.aws_subnet.data_subnets_b.id, data.aws_subnet.data_subnets_c.id
]

@@ -750,7 +750,7 @@ module "dms_fake_data_ingestor" {
migration_type = "full-load-and-cdc"
replication_instance_version = "3.4.7" # Rollback
replication_instance_class = "dms.t3.medium"
- subnet_ids = [
+ subnet_ids = [
data.aws_subnet.data_subnets_a.id, data.aws_subnet.data_subnets_b.id, data.aws_subnet.data_subnets_c.id
]

