Skip to content

Commit

Permalink
GCP: Automatic setup of the backup configuration
Browse files Browse the repository at this point in the history
  • Loading branch information
vitabaks committed Nov 27, 2023
1 parent f7e8bc8 commit 108a483
Show file tree
Hide file tree
Showing 4 changed files with 149 additions and 4 deletions.
8 changes: 7 additions & 1 deletion roles/cloud-resources/defaults/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -31,11 +31,17 @@ database_public_access: false # Allow access to the database from the public ne
database_public_allowed_ips: "" # (comma-separated list of IP addresses in CIDR format) If empty, then public access is allowed for any IP address.

# Backups (if 'pgbackrest_install' or 'wal_g_install' is 'true')
aws_s3_bucket_create: true
aws_s3_bucket_create: true # if 'provision=aws'
aws_s3_bucket_name: "backups-{{ patroni_cluster_name }}" # Name of the S3 bucket. Bucket naming rules: https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html
aws_s3_bucket_region: "{{ server_location }}" # The AWS region to use.
aws_s3_bucket_object_lock_enabled: false # Whether S3 Object Lock to be enabled.
aws_s3_bucket_encryption: "AES256" # Describes the default server-side encryption to apply to new objects in the bucket. Choices: "AES256", "aws:kms"
aws_s3_bucket_absent: true # Allow to delete S3 bucket when deleting a cluster servers using the 'state=absent' variable.

gcp_bucket_create: true # if 'provision=gcp'
gcp_bucket_name: "backups-{{ patroni_cluster_name }}" # Name of the GCS bucket.
gcp_bucket_storage_class: "MULTI_REGIONAL" # The bucket's default storage class. Values include: MULTI_REGIONAL, REGIONAL, STANDARD, NEARLINE, COLDLINE, ARCHIVE, DURABLE_REDUCED_AVAILABILITY.
gcp_bucket_default_object_acl: "projectPrivate" # Apply a predefined set of default object access controls to this bucket.
gcp_bucket_absent: true # Allow to delete GCS bucket when deleting a cluster servers using the 'state=absent' variable.

...
26 changes: 26 additions & 0 deletions roles/cloud-resources/tasks/gcp.yml
Original file line number Diff line number Diff line change
Expand Up @@ -170,6 +170,21 @@
}}
when: firewall | bool

# GCS Bucket
- name: "GCP: Create bucket '{{ gcp_bucket_name }}'"
google.cloud.gcp_storage_bucket:
auth_kind: "serviceaccount"
service_account_contents: "{{ lookup('ansible.builtin.env', 'GCP_SERVICE_ACCOUNT_CONTENTS') }}"
project: "{{ gcp_project | default(project_info.resources[0].projectNumber) }}"
name: "{{ gcp_bucket_name }}"
state: present
storage_class: "{{ gcp_bucket_storage_class }}"
predefined_default_object_acl: "{{ gcp_bucket_default_object_acl }}"
when:
- (pgbackrest_install | bool or wal_g_install | bool)
- gcp_bucket_create | bool

# Server and volume
- name: "GCP: Create or modify VM instance"
google.cloud.gcp_compute_instance:
auth_kind: "serviceaccount"
Expand Down Expand Up @@ -313,6 +328,17 @@
project: "{{ gcp_project | default(project_info.resources[0].projectNumber) }}"
name: "{{ patroni_cluster_name }}-firewall"
state: absent

- name: "GCP: Delete bucket '{{ gcp_bucket_name }}'"
google.cloud.gcp_storage_bucket:
auth_kind: "serviceaccount"
service_account_contents: "{{ lookup('ansible.builtin.env', 'GCP_SERVICE_ACCOUNT_CONTENTS') }}"
project: "{{ gcp_project | default(project_info.resources[0].projectNumber) }}"
name: "{{ gcp_bucket_name }}"
state: absent
when:
- (pgbackrest_install | bool or wal_g_install | bool)
- gcp_bucket_absent | bool
when: state == 'absent'

...
65 changes: 64 additions & 1 deletion roles/pgbackrest/tasks/auto_conf.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
---

# AWS S3 bucket
# AWS S3 bucket (if 'provision=aws')
- name: "Set variable 'pgbackrest_conf' for backup in the AWS S3 bucket"
ansible.builtin.set_fact:
pgbackrest_conf:
Expand Down Expand Up @@ -31,6 +31,69 @@
- { option: "log-level-console", value: "info" }
- { option: "recovery-option", value: "recovery_target_action=promote" }
- { option: "pg1-path", value: "{{ pg_data_mount_path | default('/pgdata') }}/{{ postgresql_version }}/{{ patroni_cluster_name }}" }
no_log: true # do not output contents to the ansible log
when: provision | default('') | lower == 'aws'

# GCS Bucket (if 'provision=gcp')
- block:
    # pgBackRest configuration for a GCS repository. Each PGBACKREST_* variable,
    # when provided, overrides the corresponding default value.
    - name: "Set variable 'pgbackrest_conf' for backup in the GCS Bucket"
      ansible.builtin.set_fact:
        pgbackrest_conf:
          global:
            - { option: "log-level-file", value: "detail" }
            - { option: "log-path", value: "/var/log/pgbackrest" }
            - { option: "repo1-type", value: "gcs" }
            - { option: "repo1-path", value: "{{ PGBACKREST_REPO_PATH | default('/pgbackrest') }}" }
            # Path to the service-account key file placed by the copy tasks below.
            - { option: "repo1-gcs-key", value: "{{ PGBACKREST_GCS_KEY | default(postgresql_home_dir + '/gcs-key.json') }}" }
            - { option: "repo1-gcs-bucket", value: "{{ PGBACKREST_GCS_BUCKET | default(gcp_bucket_name | default('backups-' + patroni_cluster_name)) }}" }
            - { option: "repo1-retention-full", value: "{{ PGBACKREST_RETENTION_FULL | default('4') }}" }
            - { option: "repo1-retention-archive", value: "{{ PGBACKREST_RETENTION_ARCHIVE | default('4') }}" }
            - { option: "repo1-retention-archive-type", value: "{{ PGBACKREST_RETENTION_ARCHIVE_TYPE | default('full') }}" }
            - { option: "repo1-bundle", value: "y" }
            - { option: "repo1-block", value: "y" }
            - { option: "start-fast", value: "y" }
            - { option: "stop-auto", value: "y" }
            - { option: "link-all", value: "y" }
            - { option: "resume", value: "n" }
            - { option: "archive-async", value: "y" }
            - { option: "archive-get-queue-max", value: "1GiB" }
            - { option: "spool-path", value: "/var/spool/pgbackrest" }
            # Default: half of the vCPUs, but never less than 1.
            - { option: "process-max", value: "{{ PGBACKREST_PROCESS_MAX | default([ansible_processor_vcpus | int // 2, 1] | max) }}" }
          stanza:
            - { option: "log-level-console", value: "info" }
            - { option: "recovery-option", value: "recovery_target_action=promote" }
            - { option: "pg1-path", value: "{{ pg_data_mount_path | default('/pgdata') }}/{{ postgresql_version }}/{{ patroni_cluster_name }}" }
      no_log: true # do not output contents to the ansible log

    # if 'gcs_key_file' is not defined, copy GCS key file from GCP_SERVICE_ACCOUNT_CONTENTS environment variable.
    - block:
        - name: "Get GCP service account contents from localhost"
          ansible.builtin.set_fact:
            gcp_service_account_contents: "{{ lookup('ansible.builtin.env', 'GCP_SERVICE_ACCOUNT_CONTENTS') }}"
          delegate_to: localhost
          run_once: true # noqa run-once
          no_log: true # do not output GCP service account contents to the ansible log

        - name: "Copy GCP service account contents to {{ PGBACKREST_GCS_KEY | default(postgresql_home_dir + '/gcs-key.json') }}"
          ansible.builtin.copy:
            content: "{{ gcp_service_account_contents }}"
            dest: "{{ PGBACKREST_GCS_KEY | default(postgresql_home_dir + '/gcs-key.json') }}"
            mode: '0600'
            owner: "postgres"
            group: "postgres"
          no_log: true # do not output GCP service account contents to the ansible log
      when: gcs_key_file is not defined

    # if 'gcs_key_file' is defined, copy this GCS key file.
    # NOTE(review): if 'gcs_key_file' is defined but empty, neither copy task
    # runs and no key file is placed — confirm this is intended.
    - name: "Copy GCS key file to {{ PGBACKREST_GCS_KEY | default(postgresql_home_dir + '/gcs-key.json') }}"
      ansible.builtin.copy:
        src: "{{ gcs_key_file }}"
        dest: "{{ PGBACKREST_GCS_KEY | default(postgresql_home_dir + '/gcs-key.json') }}"
        mode: '0600'
        owner: "postgres"
        group: "postgres"
      no_log: true # do not output GCS key file contents to the ansible log
      when: gcs_key_file is defined and gcs_key_file | length > 0
  when: provision | default('') | lower == 'gcp'

...
54 changes: 52 additions & 2 deletions roles/wal-g/tasks/auto_conf.yml
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
---

# AWS S3 bucket
# AWS S3 bucket (if 'provision=aws')
- name: "Set variable 'wal_g_json' for backup in the AWS S3 bucket"
ansible.builtin.set_fact:
wal_g_json:
- { option: "AWS_ACCESS_KEY_ID", value: "{{ WALG_AWS_ACCESS_KEY_ID | default('') }}" }
- { option: "AWS_SECRET_ACCESS_KEY", value: "{{ WALG_AWS_SECRET_ACCESS_KEY | default('') }}" }
- { option: "WALG_S3_PREFIX", value: "{{ WALG_S3_PREFIX | default('s3://backups-' + patroni_cluster_name) }}" }
- { option: "WALG_S3_PREFIX", value: "{{ WALG_S3_PREFIX | default('s3://' + (aws_s3_bucket_name | default('backups-' + patroni_cluster_name))) }}" }
- { option: "AWS_REGION", value: "{{ WALG_AWS_REGION | default(aws_s3_bucket_region | default(server_location)) }}" }
- { option: "WALG_COMPRESSION_METHOD", value: "{{ WALG_COMPRESSION_METHOD | default('brotli') }}" }
- { option: "WALG_DELTA_MAX_STEPS", value: "{{ WALG_DELTA_MAX_STEPS | default('6') }}" }
Expand All @@ -17,6 +17,56 @@
- { option: "PGHOST", value: "{{ postgresql_unix_socket_dir | default('/var/run/postgresql') }}" }
- { option: "PGPORT", value: "{{ postgresql_port | default('5432') }}" }
- { option: "PGUSER", value: "{{ patroni_superuser_username | default('postgres') }}" }
no_log: true # do not output contents to the ansible log
when: provision | default('') | lower == 'aws'

# GCS Bucket (if 'provision=gcp')
- block:
    # WAL-G environment for a GCS repository. Each WALG_* variable, when
    # provided, overrides the corresponding default value.
    # Fix: the task name previously said "AWS S3 bucket" (copy-paste error).
    - name: "Set variable 'wal_g_json' for backup in the GCS bucket"
      ansible.builtin.set_fact:
        wal_g_json:
          # Points at the service-account key file placed by the copy tasks below.
          - { option: "GOOGLE_APPLICATION_CREDENTIALS", value: "{{ WALG_GS_KEY | default(postgresql_home_dir + '/gcs-key.json') }}" }
          - { option: "WALG_GS_PREFIX", value: "{{ WALG_GS_PREFIX | default('gs://' + (gcp_bucket_name | default('backups-' + patroni_cluster_name))) }}" }
          - { option: "WALG_COMPRESSION_METHOD", value: "{{ WALG_COMPRESSION_METHOD | default('brotli') }}" }
          - { option: "WALG_DELTA_MAX_STEPS", value: "{{ WALG_DELTA_MAX_STEPS | default('6') }}" }
          # Concurrency defaults: half of the vCPUs, but never less than 1.
          - { option: "WALG_DOWNLOAD_CONCURRENCY", value: "{{ WALG_DOWNLOAD_CONCURRENCY | default([ansible_processor_vcpus | int // 2, 1] | max) }}" }
          - { option: "WALG_UPLOAD_CONCURRENCY", value: "{{ WALG_UPLOAD_CONCURRENCY | default([ansible_processor_vcpus | int // 2, 1] | max) }}" }
          - { option: "WALG_UPLOAD_DISK_CONCURRENCY", value: "{{ WALG_UPLOAD_DISK_CONCURRENCY | default([ansible_processor_vcpus | int // 2, 1] | max) }}" }
          - { option: "PGDATA", value: "{{ pg_data_mount_path | default('/pgdata') }}/{{ postgresql_version }}/{{ patroni_cluster_name }}" }
          - { option: "PGHOST", value: "{{ postgresql_unix_socket_dir | default('/var/run/postgresql') }}" }
          - { option: "PGPORT", value: "{{ postgresql_port | default('5432') }}" }
          - { option: "PGUSER", value: "{{ patroni_superuser_username | default('postgres') }}" }
      no_log: true # do not output contents to the ansible log

    # if 'gcs_key_file' is not defined, copy GCS key file from GCP_SERVICE_ACCOUNT_CONTENTS environment variable.
    - block:
        - name: "Get GCP service account contents from localhost"
          ansible.builtin.set_fact:
            gcp_service_account_contents: "{{ lookup('ansible.builtin.env', 'GCP_SERVICE_ACCOUNT_CONTENTS') }}"
          delegate_to: localhost
          run_once: true # noqa run-once
          no_log: true # do not output GCP service account contents to the ansible log

        - name: "Copy GCP service account contents to {{ WALG_GS_KEY | default(postgresql_home_dir + '/gcs-key.json') }}"
          ansible.builtin.copy:
            content: "{{ gcp_service_account_contents }}"
            dest: "{{ WALG_GS_KEY | default(postgresql_home_dir + '/gcs-key.json') }}"
            mode: '0600'
            owner: "postgres"
            group: "postgres"
          no_log: true # do not output GCP service account contents to the ansible log
      when: gcs_key_file is not defined

    # if 'gcs_key_file' is defined, copy this GCS key file.
    # NOTE(review): if 'gcs_key_file' is defined but empty, neither copy task
    # runs and no key file is placed — confirm this is intended.
    - name: "Copy GCS key file to {{ WALG_GS_KEY | default(postgresql_home_dir + '/gcs-key.json') }}"
      ansible.builtin.copy:
        src: "{{ gcs_key_file }}"
        dest: "{{ WALG_GS_KEY | default(postgresql_home_dir + '/gcs-key.json') }}"
        mode: '0600'
        owner: "postgres"
        group: "postgres"
      no_log: true # do not output GCS key file contents to the ansible log
      when: gcs_key_file is defined and gcs_key_file | length > 0
  when: provision | default('') | lower == 'gcp'

...

0 comments on commit 108a483

Please sign in to comment.