Skip to content

Commit

Permalink
backup/recovery: logging (WIP) (#1277)
Browse files Browse the repository at this point in the history
- work in progress, does not work with elasticsearch clusters yet
  • Loading branch information
sk4zuzu committed May 27, 2020
1 parent db98366 commit a83eb75
Show file tree
Hide file tree
Showing 14 changed files with 558 additions and 5 deletions.
4 changes: 2 additions & 2 deletions core/src/epicli/cli/epicli.py
Original file line number Diff line number Diff line change
Expand Up @@ -285,7 +285,7 @@ def backup_parser(subparsers):
sub_parser.add_argument('-b', '--build', dest='build_directory', type=str, required=True,
help='Absolute path to directory with build artifacts.')

available_components = {'kubernetes', 'loadbalancer', 'postgresql', 'monitoring'}
available_components = {'kubernetes', 'loadbalancer', 'logging', 'monitoring', 'postgresql'}

enabled_components = set(available_components) # enable everything by default
enabled_components_joined = ','.join(sorted(enabled_components))
Expand All @@ -309,7 +309,7 @@ def recovery_parser(subparsers):
sub_parser.add_argument('-b', '--build', dest='build_directory', type=str, required=True,
help='Absolute path to directory with build artifacts.')

available_components = {'kubernetes', 'loadbalancer', 'postgresql', 'monitoring'}
available_components = {'kubernetes', 'loadbalancer', 'logging', 'monitoring', 'postgresql'}

enabled_components = set() # disable everything by default
enabled_components_joined = ','.join(sorted(enabled_components))
Expand Down
29 changes: 29 additions & 0 deletions core/src/epicli/data/common/ansible/playbooks/backup_logging.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
---
# Ansible playbook for backing up logging data.
# Play 1 runs on the first logging (Elasticsearch) host only; play 2 on the
# first Kibana host only — serial: 1 keeps each play strictly single-host.

- hosts: logging[0]
  gather_facts: true  # facts feed the snapshot tasks (endpoint IP, timestamp)
  become: true
  become_method: sudo
  serial: 1
  tasks:
    # Snapshot first: it sets the "snapshot_name" fact that the etc-archive
    # tasks assert on.
    - import_role:
        name: backup
        tasks_from: logging_elasticsearch_snapshot
    - import_role:
        name: backup
        tasks_from: logging_elasticsearch_etc
  vars_files:
    - roles/logging/vars/main.yml

- hosts: kibana[0]
  gather_facts: true
  become: true
  become_method: sudo
  serial: 1
  tasks:
    - import_role:
        name: backup
        tasks_from: logging_kibana_etc
  vars_files:
    - roles/kibana/vars/main.yml
28 changes: 28 additions & 0 deletions core/src/epicli/data/common/ansible/playbooks/recovery_logging.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
---
# Ansible playbook for recovering logging data.
# Mirrors backup_logging.yml: play 1 restores on the first logging host,
# play 2 on the first Kibana host; serial: 1 keeps each play single-host.

- hosts: logging[0]
  # Explicit for consistency with backup_logging.yml (gathering is Ansible's
  # default anyway); NOTE(review): recovery tasks presumably need facts for
  # the Elasticsearch endpoint, as the backup tasks do — confirm.
  gather_facts: true
  become: true
  become_method: sudo
  serial: 1
  tasks:
    # Restore /etc/elasticsearch first, then the snapshot data.
    - import_role:
        name: recovery
        tasks_from: logging_elasticsearch_etc
    - import_role:
        name: recovery
        tasks_from: logging_elasticsearch_snapshot
  vars_files:
    - roles/logging/vars/main.yml

- hosts: kibana[0]
  gather_facts: true
  become: true
  become_method: sudo
  serial: 1
  tasks:
    - import_role:
        name: recovery
        tasks_from: logging_kibana_etc
  vars_files:
    - roles/kibana/vars/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,3 +2,5 @@
# Local staging directory for backup artifacts on each backed-up host.
backup_dir: /epibackup
# Destination directory for collected artifacts; presumably a mount point —
# verify against download_via_rsync.yml.
backup_destination_dir: "{{ backup_dir }}/mounted"
# Artifacts are shipped to the first Kubernetes master.
backup_destination_host: "{{ groups.kubernetes_master.0 }}"
# Name of the Elasticsearch snapshot repository used for logging backups.
elasticsearch_snapshot_repository_name: epiphany
# Filesystem path backing the snapshot repository; must be listed in the
# cluster's path.repo setting.
elasticsearch_snapshot_repository_location: /var/lib/elasticsearch-snapshots
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
---
# Archives /etc/elasticsearch and ships it (with a SHA-1 checksum file) to the
# backup destination. Requires the "snapshot_name" fact, which is set by
# logging_elasticsearch_snapshot — run that first.

- name: Assert that the "snapshot_name" fact is defined and valid
  assert:
    that:
      - snapshot_name is defined
      - snapshot_name is string
      - snapshot_name | length > 0
    fail_msg: The "snapshot_name" fact must be defined and must be a non-empty string.

- name: Create and copy etc archive to backup destination
  # block/always: local archives are removed even if archiving or transfer fails.
  block:
    - name: Ensure backup dir exists
      file:
        path: "{{ backup_dir }}/"
        state: directory

    - name: Create etc archive
      archive:
        dest: "{{ backup_dir }}/elasticsearch_etc_{{ snapshot_name }}.tar.gz"
        path: /etc/elasticsearch/  # keep the / here!
        format: gz

    - name: Calculate checksum from etc archive
      stat:
        path: "{{ backup_dir }}/elasticsearch_etc_{{ snapshot_name }}.tar.gz"
        get_attributes: false
        get_checksum: true
        get_mime: false
        checksum_algorithm: sha1
      register: stat_elasticsearch_etc_archive

    - name: Store etc archive checksum in a file
      copy:
        dest: "{{ backup_dir }}/elasticsearch_etc_{{ snapshot_name }}.tar.gz.sha1"
        # "checksum  filename" layout, compatible with sha1sum -c.
        content: |
          {{ stat_elasticsearch_etc_archive.stat.checksum }} elasticsearch_etc_{{ snapshot_name }}.tar.gz

    - name: Transfer etc archive via rsync
      import_tasks: download_via_rsync.yml
      vars:
        artifacts:
          - "{{ backup_dir }}/elasticsearch_etc_{{ snapshot_name }}.tar.gz"
          - "{{ backup_dir }}/elasticsearch_etc_{{ snapshot_name }}.tar.gz.sha1"

  always:
    - name: Delete etc archive (cleanup)
      file:
        path: "{{ item }}"
        state: absent
      loop:
        - "{{ backup_dir }}/elasticsearch_etc_{{ snapshot_name }}.tar.gz"
        - "{{ backup_dir }}/elasticsearch_etc_{{ snapshot_name }}.tar.gz.sha1"
Original file line number Diff line number Diff line change
@@ -0,0 +1,111 @@
---
# Creates an Elasticsearch filesystem snapshot, prunes older snapshots,
# archives the repository directory and ships it to the backup destination.
# Sets the "snapshot_name" fact consumed by logging_elasticsearch_etc and
# logging_kibana_etc.

- name: Set helper facts
  set_fact:
    elasticsearch_endpoint: >-
      https://{{ ansible_default_ipv4.address }}:9200
    # e.g. 20200527-123456 — iso8601_basic_short with 'T' replaced by '-'.
    snapshot_name: >-
      {{ ansible_date_time.iso8601_basic_short | replace('T','-') }}
  vars:
    # Parse-time template only: the &uri anchor is merged into every uri call
    # below via "<<: *uri"; the variable itself is never read at runtime.
    uri_template: &uri
      client_cert: /etc/elasticsearch/kirk.pem
      client_key: /etc/elasticsearch/kirk-key.pem
      validate_certs: false  # NOTE(review): disables TLS verification — confirm acceptable
      body_format: json

- debug: var=snapshot_name

- name: Check cluster health
  uri:
    <<: *uri
    url: "{{ elasticsearch_endpoint }}/_cluster/health"
    method: GET
  register: uri_response
  # Retry until the API answers (up to 12 x 5s = 1 minute).
  until: uri_response is success
  retries: 12
  delay: 5

- name: Ensure snapshot repository is defined
  # PUT is idempotent here: re-registering the same fs repository is a no-op.
  uri:
    <<: *uri
    url: "{{ elasticsearch_endpoint }}/_snapshot/{{ elasticsearch_snapshot_repository_name }}"
    method: PUT
    body:
      type: fs
      settings:
        location: "{{ elasticsearch_snapshot_repository_location }}"
        compress: true

- name: Trigger snapshot creation
  uri:
    <<: *uri
    url: "{{ elasticsearch_endpoint }}/_snapshot/{{ elasticsearch_snapshot_repository_name }}/{{ snapshot_name }}"
    method: PUT

- name: Wait (up to 12h) for snapshot completion
  uri:
    <<: *uri
    url: "{{ elasticsearch_endpoint }}/_snapshot/{{ elasticsearch_snapshot_repository_name }}/{{ snapshot_name }}"
    method: GET
  register: uri_response
  until: (uri_response.json.snapshots | selectattr('snapshot', 'equalto', snapshot_name) | first).state == "SUCCESS"
  retries: "{{ (12 * 3600 // 10) | int }}"  # 12h at one poll every 10s
  delay: 10

- name: Find all snapshots
  uri:
    <<: *uri
    url: "{{ elasticsearch_endpoint }}/_snapshot/{{ elasticsearch_snapshot_repository_name }}/_all"
    method: GET
  register: uri_response

- name: Delete old snapshots
  # Keep only the snapshot created above; delete every other one by name.
  uri:
    <<: *uri
    url: "{{ elasticsearch_endpoint }}/_snapshot/{{ elasticsearch_snapshot_repository_name }}/{{ item }}"
    method: DELETE
  loop: >-
    {{ uri_response.json.snapshots | map(attribute='snapshot') | reject('equalto', snapshot_name) | list }}

- name: Create and copy snapshot archive to backup destination
  # block/always: local archives are removed even if archiving or transfer fails.
  block:
    - name: Ensure backup dir exists
      file:
        path: "{{ backup_dir }}/"
        state: directory

    - name: Create snapshot archive
      archive:
        dest: "{{ backup_dir }}/elasticsearch_snapshot_{{ snapshot_name }}.tar.gz"
        path: "{{ elasticsearch_snapshot_repository_location }}/*"
        format: gz

    - name: Calculate checksum from snapshot archive
      stat:
        path: "{{ backup_dir }}/elasticsearch_snapshot_{{ snapshot_name }}.tar.gz"
        get_attributes: false
        get_checksum: true
        get_mime: false
        checksum_algorithm: sha1
      register: stat_snapshot_archive

    - name: Store snapshot archive checksum in a file
      copy:
        dest: "{{ backup_dir }}/elasticsearch_snapshot_{{ snapshot_name }}.tar.gz.sha1"
        # "checksum  filename" layout, compatible with sha1sum -c.
        content: |
          {{ stat_snapshot_archive.stat.checksum }} elasticsearch_snapshot_{{ snapshot_name }}.tar.gz

    - name: Transfer snapshot archive via rsync
      import_tasks: download_via_rsync.yml
      vars:
        artifacts:
          - "{{ backup_dir }}/elasticsearch_snapshot_{{ snapshot_name }}.tar.gz"
          - "{{ backup_dir }}/elasticsearch_snapshot_{{ snapshot_name }}.tar.gz.sha1"

  always:
    - name: Delete snapshot archive (cleanup)
      file:
        path: "{{ item }}"
        state: absent
      loop:
        - "{{ backup_dir }}/elasticsearch_snapshot_{{ snapshot_name }}.tar.gz"
        - "{{ backup_dir }}/elasticsearch_snapshot_{{ snapshot_name }}.tar.gz.sha1"
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
---
# Archives /etc/kibana and ships it (with a SHA-1 checksum file) to the
# backup destination. Requires the "snapshot_name" fact, which is set by
# logging_elasticsearch_snapshot — run that first.

- name: Assert that the "snapshot_name" fact is defined and valid
  assert:
    that:
      - snapshot_name is defined
      - snapshot_name is string
      - snapshot_name | length > 0
    fail_msg: The "snapshot_name" fact must be defined and must be a non-empty string.

- name: Create and copy etc archive to backup destination
  # block/always: local archives are removed even if archiving or transfer fails.
  block:
    - name: Ensure backup dir exists
      file:
        path: "{{ backup_dir }}/"
        state: directory

    - name: Create etc archive
      archive:
        dest: "{{ backup_dir }}/kibana_etc_{{ snapshot_name }}.tar.gz"
        path: /etc/kibana/  # keep the / here!
        format: gz

    - name: Calculate checksum from etc archive
      stat:
        path: "{{ backup_dir }}/kibana_etc_{{ snapshot_name }}.tar.gz"
        get_attributes: false
        get_checksum: true
        get_mime: false
        checksum_algorithm: sha1
      register: stat_etc_archive

    - name: Store etc archive checksum in a file
      copy:
        dest: "{{ backup_dir }}/kibana_etc_{{ snapshot_name }}.tar.gz.sha1"
        # "checksum  filename" layout, compatible with sha1sum -c.
        content: |
          {{ stat_etc_archive.stat.checksum }} kibana_etc_{{ snapshot_name }}.tar.gz

    - name: Transfer etc archive via rsync
      import_tasks: download_via_rsync.yml
      vars:
        artifacts:
          - "{{ backup_dir }}/kibana_etc_{{ snapshot_name }}.tar.gz"
          - "{{ backup_dir }}/kibana_etc_{{ snapshot_name }}.tar.gz.sha1"

  always:
    - name: Delete etc archive (cleanup)
      file:
        path: "{{ item }}"
        state: absent
      loop:
        - "{{ backup_dir }}/kibana_etc_{{ snapshot_name }}.tar.gz"
        - "{{ backup_dir }}/kibana_etc_{{ snapshot_name }}.tar.gz.sha1"
Original file line number Diff line number Diff line change
@@ -1,4 +1,11 @@
---
- name: Ensure snapshot folder exists
  # Snapshot repository directory, restricted to the elasticsearch user
  # (rwx owner, no group/other access).
  file:
    path: "{{ specification.paths.repo }}/"
    state: directory
    owner: elasticsearch
    group: elasticsearch
    mode: "u=rwx,go="  # quoted: symbolic modes are strings, not YAML scalars to re-type

- name: Create Elasticsearch configuration file
template:
Expand All @@ -19,4 +26,4 @@
systemd:
name: elasticsearch
state: started
enabled: yes
enabled: yes
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,10 @@ node.name: {{ansible_hostname}}
#
path.data: {{specification.paths.data}}
#
# Path to directory used as the snapshot repository (path.repo):
#
path.repo: {{specification.paths.repo}}
#
# Path to log files:
#
path.logs: {{specification.paths.logs}}
Expand All @@ -52,7 +56,7 @@ path.logs: {{specification.paths.logs}}
#
# Set the bind address to a specific IP (IPv4 or IPv6):
#
network.host: {{ansible_hostname}}
network.host: {{ansible_hostname}}
#
# Set a custom port for HTTP:
#
Expand Down Expand Up @@ -113,4 +117,4 @@ opendistro_security.check_snapshot_restore_write_privileges: true
opendistro_security.restapi.roles_enabled: ["all_access", "security_rest_api_access"]
cluster.routing.allocation.disk.threshold_enabled: false
node.max_local_storage_nodes: 3
######## End OpenDistro for Elasticsearch Security Demo Configuration ########
######## End OpenDistro for Elasticsearch Security Demo Configuration ########
Original file line number Diff line number Diff line change
Expand Up @@ -2,3 +2,5 @@
# Local staging directory for recovery artifacts on each recovered host.
recovery_dir: /epibackup
# Source directory for fetched artifacts; presumably a mount point —
# verify against the recovery transfer tasks.
recovery_source_dir: "{{ recovery_dir }}/mounted"
# Artifacts are fetched from the first Kubernetes master.
recovery_source_host: "{{ groups.kubernetes_master.0 }}"
# Name of the Elasticsearch snapshot repository used for logging backups;
# must match the backup role's defaults.
elasticsearch_snapshot_repository_name: epiphany
# Filesystem path backing the snapshot repository; must be listed in the
# cluster's path.repo setting.
elasticsearch_snapshot_repository_location: /var/lib/elasticsearch-snapshots
Loading

0 comments on commit a83eb75

Please sign in to comment.