diff --git a/core/src/epicli/cli/engine/PatchEngine.py b/core/src/epicli/cli/engine/PatchEngine.py
index 28aece5d41..e1b893bb8f 100644
--- a/core/src/epicli/cli/engine/PatchEngine.py
+++ b/core/src/epicli/cli/engine/PatchEngine.py
@@ -1,17 +1,35 @@
 import os
 
+from cli.helpers.Config import Config
+from cli.helpers.Log import Log
 from cli.helpers.Step import Step
+
+from cli.helpers.build_saver import get_build_path, get_inventory_path_for_build
+from cli.helpers.build_saver import copy_files_recursively, copy_file
+from cli.helpers.yaml_helpers import safe_load_all, dump
+from cli.helpers.doc_list_helpers import select_single
+from cli.helpers.argparse_helpers import components_to_dict
+
+from cli.engine.schema.DefaultMerger import DefaultMerger
+from cli.engine.schema.SchemaValidator import SchemaValidator
+
 from cli.engine.ansible.AnsibleCommand import AnsibleCommand
 from cli.engine.ansible.AnsibleRunner import AnsibleRunner
-from cli.helpers.Config import Config
-from cli.helpers.build_saver import copy_files_recursively, copy_file, get_inventory_path_for_build
 
 
 class PatchEngine(Step):
+    """Perform backup and recovery operations."""
+
     def __init__(self, input_data):
         super().__init__(__name__)
-        self.build_directory = input_data.build_directory
-        self.components = input_data.components
+        self.file = input_data.file
+        self.parsed_components = None if input_data.components is None else set(input_data.components)
+        self.component_dict = dict()
+        self.input_docs = list()
+        self.cluster_model = None
+        self.backup_doc = None
+        self.recovery_doc = None
+        self.build_directory = None
         self.ansible_command = AnsibleCommand()
 
     def __enter__(self):
@@ -21,29 +39,103 @@ def __enter__(self):
     def __exit__(self, exc_type, exc_value, traceback):
         super().__exit__(exc_type, exc_value, traceback)
 
+    def _process_input_docs(self):
+        # Load the user input YAML docs from the input file
+        if os.path.isabs(self.file):
+            path_to_load = self.file
+        else:
+            path_to_load = os.path.join(os.getcwd(), self.file)
+        with open(path_to_load, 'r') as user_file_stream:
+            self.input_docs = safe_load_all(user_file_stream)
+
+        # Merge the input docs with defaults
+        with DefaultMerger(self.input_docs) as doc_merger:
+            self.input_docs = doc_merger.run()
+
+        # Get the cluster model
+        self.cluster_model = select_single(self.input_docs, lambda x: x.kind == 'epiphany-cluster')
+        if self.cluster_model is None:
+            raise Exception('No cluster model defined in input YAML file')
+
+        # Validate input documents
+        with SchemaValidator(self.cluster_model, self.input_docs) as schema_validator:
+            schema_validator.run()
+
+        # Get backup config document
+        self.backup_doc = select_single(self.input_docs, lambda x: x.kind == 'configuration/backup')
+
+        # Get recovery config document
+        self.recovery_doc = select_single(self.input_docs, lambda x: x.kind == 'configuration/recovery')
+
+        # Derive the build directory path
+        self.build_directory = get_build_path(self.cluster_model.specification.name)
+
+    def _process_component_config(self, document):
+        if self.parsed_components is not None:
+            available_components = set(document.specification.components.keys())
+            self.component_dict = components_to_dict(self.parsed_components, available_components)
+
     def backup(self):
-        for component in sorted(self.components):
-            self.upgrade_patch_files_and_run('backup', component)
+        """Backup all enabled components."""
+
+        self._process_input_docs()
+        self._process_component_config(self.backup_doc)
+        self._update_role_files_and_vars('backup', self.backup_doc)
+
+        # Execute all 
enabled component playbooks sequentially + for component_name, component_config in sorted(self.backup_doc.specification.components.items()): + if self.component_dict: + # Override yaml config with command line parameters + if self.component_dict[component_name]: + self._update_playbook_files_and_run('backup', component_name) + else: + if component_config.enabled: + self._update_playbook_files_and_run('backup', component_name) + return 0 def recovery(self): - for component in sorted(self.components): - self.upgrade_patch_files_and_run('recovery', component) + """Recover all enabled components.""" + + self._process_input_docs() + self._process_component_config(self.recovery_doc) + self._update_role_files_and_vars('recovery', self.recovery_doc) + + # Execute all enabled component playbooks sequentially + for component_name, component_config in sorted(self.recovery_doc.specification.components.items()): + if self.component_dict: + # Override yaml config with command line parameters + if self.component_dict[component_name]: + self._update_playbook_files_and_run('recovery', component_name) + else: + if component_config.enabled: + self._update_playbook_files_and_run('recovery', component_name) + return 0 - def upgrade_patch_files_and_run(self, action, component): - self.logger.info(f'Running {action} on {component}...') + def _update_role_files_and_vars(self, action, document): + self.logger.info(f'Updating {action} role files...') - #copy role files + # Copy role files roles_build_path = os.path.join(self.build_directory, 'ansible/roles', action) roles_source_path = os.path.join(AnsibleRunner.ANSIBLE_PLAYBOOKS_PATH, 'roles', action) copy_files_recursively(roles_source_path, roles_build_path) - #copy playbook file - playbook_build_path = os.path.join(self.build_directory, 'ansible/') + action + '_' + component + '.yml' - playbook_source_path = os.path.join(AnsibleRunner.ANSIBLE_PLAYBOOKS_PATH) + action + '_' + component + '.yml' + # Render role vars + vars_dir = os.path.join(roles_build_path, 'vars') + os.makedirs(vars_dir, exist_ok=True) + vars_file_path = os.path.join(vars_dir, 'main.yml') + with open(vars_file_path, 'w') as stream: + dump(document, stream) + + def _update_playbook_files_and_run(self, action, component): + self.logger.info(f'Running {action} on {component}...') + + # Copy playbook file + playbook_build_path = os.path.join(self.build_directory, 'ansible', f'{action}_{component}.yml') + playbook_source_path = os.path.join(AnsibleRunner.ANSIBLE_PLAYBOOKS_PATH, f'{action}_{component}.yml') copy_file(playbook_source_path, playbook_build_path) - #run the playbook + # Run the playbook inventory_path = get_inventory_path_for_build(self.build_directory) self.ansible_command.run_playbook(inventory=inventory_path, playbook_path=playbook_build_path) diff --git a/core/src/epicli/cli/epicli.py b/core/src/epicli/cli/epicli.py index 7505ec9716..8918ba44d3 100644 --- a/core/src/epicli/cli/epicli.py +++ b/core/src/epicli/cli/epicli.py @@ -24,6 +24,7 @@ from cli.helpers.query_yes_no import query_yes_no from cli.helpers.input_query import prompt_for_password from cli.helpers.build_saver import save_to_file, get_output_path +from cli.helpers.argparse_helpers import get_component_parser from cli.engine.spec.SpecCommand import SpecCommand @@ -262,41 +263,22 @@ def run_validate(args): ''' -def _component_parser_for(available_components={}): - def parse_components(value): - parsed_items = set( - item_stripped - for item in value.split(',') - for item_stripped in [item.strip()] - if item_stripped - ) - 
if len(parsed_items) == 1 and 'all' in parsed_items:
-            return set(available_components)
-        difference = parsed_items - set(available_components)
-        if difference:
-            raise Exception('Error parsing components: invalid values present')
-        return parsed_items
-    return parse_components
-
-
 def backup_parser(subparsers):
-    sub_parser = subparsers.add_parser('backup',
-                                       description='[Experimental]: Backups existing Epiphany Platform components.')
-    sub_parser.add_argument('-b', '--build', dest='build_directory', type=str, required=True,
-                            help='Absolute path to directory with build artifacts.')
+    """Configure and execute backup of cluster components."""
 
-    available_components = {'kubernetes', 'loadbalancer', 'logging', 'monitoring', 'postgresql', 'rabbitmq'}
+    sub_parser = subparsers.add_parser('backup',
+                                       description='Create backup of cluster components.')
+    sub_parser.add_argument('-f', '--file', dest='file', type=str,
+                            help='File with infrastructure/configuration definitions to use.')
 
-    enabled_components = set(available_components)  # enable everything by default
-    enabled_components_joined = ','.join(sorted(enabled_components))
+    available_components = {'kubernetes', 'load_balancer', 'logging', 'monitoring', 'postgresql', 'rabbitmq'}
 
-    sub_parser.add_argument('-c', '--components', dest='components', type=_component_parser_for(available_components), required=False,
-                            help=f'Specify comma-separated list of components to backup (defaults to "{enabled_components_joined}").',
-                            default=enabled_components_joined)
+    sub_parser.add_argument('-c', '--components', dest='components', type=get_component_parser(available_components), required=False,
+                            help='Specify comma-separated list of components to backup (defaults to components enabled in the yaml config).',
+                            default=None)  # "None" indicates that the yaml config will be used
 
 
 def run_backup(args):
-    experimental_query()
-    adjust_paths_from_build(args)
+    adjust_paths_from_file(args)
     with PatchEngine(args) as engine:
         return engine.backup()
 
@@ -304,23 +286,23 @@ def run_backup(args):
 
 
 def recovery_parser(subparsers):
-    sub_parser = subparsers.add_parser('recovery',
-                                       description='[Experimental]: Recover from existing backup.')
-    sub_parser.add_argument('-b', '--build', dest='build_directory', type=str, required=True,
-                            help='Absolute path to directory with build artifacts.')
+    """Configure and execute recovery of cluster components."""
 
-    available_components = {'kubernetes', 'loadbalancer', 'logging', 'monitoring', 'postgresql', 'rabbitmq'}
+    sub_parser = subparsers.add_parser('recovery',
+                                       description='Recover from existing backup.')
+    sub_parser.add_argument('-f', '--file', dest='file', type=str,
+                            help='File with infrastructure/configuration definitions to use.')
 
-    enabled_components = set()  # disable everything by default
-    enabled_components_joined = ','.join(sorted(enabled_components))
+    available_components = {'kubernetes', 'load_balancer', 'logging', 'monitoring', 'postgresql', 'rabbitmq'}
 
-    sub_parser.add_argument('-c', '--components', dest='components', type=_component_parser_for(available_components), required=False,
-                            help=f'Specify comma-separated list of components to restore (defaults to "{enabled_components_joined}").',
-                            default=enabled_components_joined)
+    sub_parser.add_argument('-c', '--components', dest='components', type=get_component_parser(available_components), required=False,
+                            help='Specify comma-separated list of components to recover (defaults to components enabled in the yaml config).',
+                            default=None)  # "None" indicates that the yaml config will be used
 
 
 def run_recovery(args):
-    experimental_query()
-    
adjust_paths_from_build(args) + if not query_yes_no('Do you really want to perform recovery?'): + return 0 + adjust_paths_from_file(args) with PatchEngine(args) as engine: return engine.recovery() diff --git a/core/src/epicli/cli/helpers/argparse_helpers.py b/core/src/epicli/cli/helpers/argparse_helpers.py new file mode 100644 index 0000000000..216d0ea068 --- /dev/null +++ b/core/src/epicli/cli/helpers/argparse_helpers.py @@ -0,0 +1,43 @@ +from cli.helpers.ObjDict import ObjDict + + +def get_component_parser(available_components): + """Return comma-separated component list parser.""" + + def parse_components(value): + parsed_items = set( + item_stripped + for item in value.split(',') + for item_stripped in [item.strip()] + if item_stripped + ) + + if len(parsed_items) == 1: + if 'all' in parsed_items: + return set(available_components) + if 'none' in parsed_items: + return set() + + difference = parsed_items - set(available_components) + if difference: + raise Exception('Error parsing components: invalid values present') + + return parsed_items + + return parse_components + + +def components_to_dict(parsed_components, available_components): + """Return an ObjDict of component -> boolean value pairs (enabled/disabled).""" + + parsed_components = frozenset(parsed_components) + available_components = frozenset(available_components) + + difference = parsed_components - available_components + if difference: + raise Exception('Error parsing components: invalid values present') + + return ObjDict( + (component_name, component_name in parsed_components) + for component_name in available_components + ) diff --git a/core/src/epicli/data/common/ansible/playbooks/backup_kubernetes.yml b/core/src/epicli/data/common/ansible/playbooks/backup_kubernetes.yml index 5e2c8e0230..6922569b1d 100644 --- a/core/src/epicli/data/common/ansible/playbooks/backup_kubernetes.yml +++ b/core/src/epicli/data/common/ansible/playbooks/backup_kubernetes.yml @@ -6,6 +6,8 @@ become_method: sudo serial: 1 tasks: - - import_role: - name: backup - tasks_from: kubernetes + - when: specification.components.kubernetes.enabled | default(false) + block: + - import_role: + name: backup + tasks_from: kubernetes diff --git a/core/src/epicli/data/common/ansible/playbooks/backup_load_balancer.yml b/core/src/epicli/data/common/ansible/playbooks/backup_load_balancer.yml new file mode 100644 index 0000000000..baa0e9695b --- /dev/null +++ b/core/src/epicli/data/common/ansible/playbooks/backup_load_balancer.yml @@ -0,0 +1,16 @@ +--- +# Ansible playbook for backing up load_balancer config + +- hosts: haproxy[0] + become: true + become_method: sudo + serial: 1 + tasks: + - when: specification.components.load_balancer.enabled | default(false) + block: + - include_vars: + file: roles/haproxy/vars/main.yml + name: component_vars + - import_role: + name: backup + tasks_from: load_balancer_haproxy_etc diff --git a/core/src/epicli/data/common/ansible/playbooks/backup_loadbalancer.yml b/core/src/epicli/data/common/ansible/playbooks/backup_loadbalancer.yml deleted file mode 100644 index 3d671c8eb1..0000000000 --- a/core/src/epicli/data/common/ansible/playbooks/backup_loadbalancer.yml +++ /dev/null @@ -1,13 +0,0 @@ ---- -# Ansible playbook for backing up loadbalancer config - -- hosts: haproxy[0] - become: true - become_method: sudo - serial: 1 - tasks: - - import_role: - name: backup - tasks_from: loadbalancer_haproxy_etc - vars_files: - - roles/haproxy/vars/main.yml diff --git a/core/src/epicli/data/common/ansible/playbooks/backup_logging.yml 
b/core/src/epicli/data/common/ansible/playbooks/backup_logging.yml index 2fe1f13e12..c1252ec696 100644 --- a/core/src/epicli/data/common/ansible/playbooks/backup_logging.yml +++ b/core/src/epicli/data/common/ansible/playbooks/backup_logging.yml @@ -7,14 +7,17 @@ become_method: sudo serial: 1 tasks: - - import_role: - name: backup - tasks_from: logging_elasticsearch_snapshot - - import_role: - name: backup - tasks_from: logging_elasticsearch_etc - vars_files: - - roles/logging/vars/main.yml + - when: specification.components.logging.enabled | default(false) + block: + - include_vars: + file: roles/logging/vars/main.yml + name: component_vars + - import_role: + name: backup + tasks_from: logging_elasticsearch_snapshot + - import_role: + name: backup + tasks_from: logging_elasticsearch_etc - hosts: kibana[0] gather_facts: true @@ -22,8 +25,13 @@ become_method: sudo serial: 1 tasks: - - import_role: - name: backup - tasks_from: logging_kibana_etc - vars_files: - - roles/kibana/vars/main.yml + - when: specification.components.logging.enabled | default(false) + block: + - include_vars: + file: roles/kibana/vars/main.yml + name: component_vars + - import_role: + name: backup + tasks_from: logging_kibana_etc + vars: + snapshot_name: "{{ hostvars[groups.logging.0].snapshot_name }}" diff --git a/core/src/epicli/data/common/ansible/playbooks/backup_monitoring.yml b/core/src/epicli/data/common/ansible/playbooks/backup_monitoring.yml index 65898cf93b..32b2f61fcf 100644 --- a/core/src/epicli/data/common/ansible/playbooks/backup_monitoring.yml +++ b/core/src/epicli/data/common/ansible/playbooks/backup_monitoring.yml @@ -2,28 +2,36 @@ # Ansible playbook for backing up monitoring data - hosts: prometheus[0] + gather_facts: true become: true become_method: sudo serial: 1 tasks: - - import_role: - name: backup - tasks_from: monitoring_prometheus_snapshot - - import_role: - name: backup - tasks_from: monitoring_prometheus_etc - vars_files: - - roles/prometheus/vars/main.yml + - when: specification.components.monitoring.enabled | default(false) + block: + - include_vars: + file: roles/prometheus/vars/main.yml + name: component_vars + - import_role: + name: backup + tasks_from: monitoring_prometheus_snapshot + - import_role: + name: backup + tasks_from: monitoring_prometheus_etc - hosts: grafana[0] + gather_facts: true become: true become_method: sudo serial: 1 tasks: - - import_role: - name: backup - tasks_from: monitoring_grafana_data - vars: - snapshot_name: "{{ hostvars[groups.prometheus.0].snapshot_name }}" - vars_files: - - roles/grafana/vars/main.yml + - when: specification.components.monitoring.enabled | default(false) + block: + - include_vars: + file: roles/grafana/vars/main.yml + name: component_vars + - import_role: + name: backup + tasks_from: monitoring_grafana_data + vars: + snapshot_name: "{{ hostvars[groups.prometheus.0].snapshot_name }}" diff --git a/core/src/epicli/data/common/ansible/playbooks/backup_postgresql.yml b/core/src/epicli/data/common/ansible/playbooks/backup_postgresql.yml index 55d44374fe..ac5b72bcf8 100644 --- a/core/src/epicli/data/common/ansible/playbooks/backup_postgresql.yml +++ b/core/src/epicli/data/common/ansible/playbooks/backup_postgresql.yml @@ -5,11 +5,11 @@ become: true become_method: sudo tasks: - - import_role: - name: backup - tasks_from: postgresql - - import_role: - name: backup - tasks_from: download_via_rsync - vars_files: - - roles/postgresql/defaults/main.yml + - when: specification.components.postgresql.enabled | default(false) + block: + - include_vars: + 
file: roles/postgresql/vars/main.yml + name: component_vars + - import_role: + name: backup + tasks_from: postgresql diff --git a/core/src/epicli/data/common/ansible/playbooks/backup_rabbitmq.yml b/core/src/epicli/data/common/ansible/playbooks/backup_rabbitmq.yml index aae5320032..a15b4c692f 100644 --- a/core/src/epicli/data/common/ansible/playbooks/backup_rabbitmq.yml +++ b/core/src/epicli/data/common/ansible/playbooks/backup_rabbitmq.yml @@ -7,11 +7,14 @@ become_method: sudo serial: 1 tasks: - - import_role: - name: backup - tasks_from: rabbitmq_rabbitmq_definitions - - import_role: - name: backup - tasks_from: rabbitmq_rabbitmq_etc - vars_files: - - roles/rabbitmq/vars/main.yml + - when: specification.components.rabbitmq.enabled | default(false) + block: + - include_vars: + file: roles/rabbitmq/vars/main.yml + name: component_vars + - import_role: + name: backup + tasks_from: rabbitmq_rabbitmq_definitions + - import_role: + name: backup + tasks_from: rabbitmq_rabbitmq_etc diff --git a/core/src/epicli/data/common/ansible/playbooks/recovery_kubernetes.yml b/core/src/epicli/data/common/ansible/playbooks/recovery_kubernetes.yml index 182512690f..de2c945f37 100644 --- a/core/src/epicli/data/common/ansible/playbooks/recovery_kubernetes.yml +++ b/core/src/epicli/data/common/ansible/playbooks/recovery_kubernetes.yml @@ -6,6 +6,8 @@ become_method: sudo serial: 1 tasks: - - import_role: - name: recovery - tasks_from: kubernetes + - when: specification.components.kubernetes.enabled | default(false) + block: + - import_role: + name: recovery + tasks_from: kubernetes diff --git a/core/src/epicli/data/common/ansible/playbooks/recovery_load_balancer.yml b/core/src/epicli/data/common/ansible/playbooks/recovery_load_balancer.yml new file mode 100644 index 0000000000..015c757dfe --- /dev/null +++ b/core/src/epicli/data/common/ansible/playbooks/recovery_load_balancer.yml @@ -0,0 +1,16 @@ +--- +# Ansible playbook for recovering load_balancer config + +- hosts: haproxy[0] + become: true + become_method: sudo + serial: 1 + tasks: + - when: specification.components.load_balancer.enabled | default(false) + block: + - include_vars: + file: roles/haproxy/vars/main.yml + name: component_vars + - import_role: + name: recovery + tasks_from: load_balancer_haproxy_etc diff --git a/core/src/epicli/data/common/ansible/playbooks/recovery_loadbalancer.yml b/core/src/epicli/data/common/ansible/playbooks/recovery_loadbalancer.yml deleted file mode 100644 index cdf552af32..0000000000 --- a/core/src/epicli/data/common/ansible/playbooks/recovery_loadbalancer.yml +++ /dev/null @@ -1,13 +0,0 @@ ---- -# Ansible playbook for recovering loadbalancer config - -- hosts: haproxy[0] - become: true - become_method: sudo - serial: 1 - tasks: - - import_role: - name: recovery - tasks_from: loadbalancer_haproxy_etc - vars_files: - - roles/haproxy/vars/main.yml diff --git a/core/src/epicli/data/common/ansible/playbooks/recovery_logging.yml b/core/src/epicli/data/common/ansible/playbooks/recovery_logging.yml index 73878915b1..796d1c0bae 100644 --- a/core/src/epicli/data/common/ansible/playbooks/recovery_logging.yml +++ b/core/src/epicli/data/common/ansible/playbooks/recovery_logging.yml @@ -6,14 +6,17 @@ become_method: sudo serial: 1 tasks: - - import_role: - name: recovery - tasks_from: logging_elasticsearch_etc - - import_role: - name: recovery - tasks_from: logging_elasticsearch_snapshot - vars_files: - - roles/logging/vars/main.yml + - when: specification.components.logging.enabled | default(false) + block: + - include_vars: + file: 
roles/logging/vars/main.yml
+            name: component_vars
+        - import_role:
+            name: recovery
+            tasks_from: logging_elasticsearch_etc
+        - import_role:
+            name: recovery
+            tasks_from: logging_elasticsearch_snapshot
 
 - hosts: kibana[0]
   gather_facts: true
@@ -21,8 +24,11 @@
   become_method: sudo
   serial: 1
   tasks:
-    - import_role:
-        name: recovery
-        tasks_from: logging_kibana_etc
-  vars_files:
-    - roles/kibana/vars/main.yml
+    - when: specification.components.logging.enabled | default(false)
+      block:
+        - include_vars:
+            file: roles/kibana/vars/main.yml
+            name: component_vars
+        - import_role:
+            name: recovery
+            tasks_from: logging_kibana_etc
diff --git a/core/src/epicli/data/common/ansible/playbooks/recovery_monitoring.yml b/core/src/epicli/data/common/ansible/playbooks/recovery_monitoring.yml
index 8175ae0c6b..a0968eebfe 100644
--- a/core/src/epicli/data/common/ansible/playbooks/recovery_monitoring.yml
+++ b/core/src/epicli/data/common/ansible/playbooks/recovery_monitoring.yml
@@ -6,12 +6,15 @@
   become_method: sudo
   serial: 1
   tasks:
-    - import_role:
-        name: recovery
-        tasks_from: monitoring_prometheus_etc
-    - import_role:
-        name: recovery
-        tasks_from: monitoring_prometheus_snapshot
+    - when: specification.components.monitoring.enabled | default(false)
+      block:
+        - include_vars:
+            file: roles/prometheus/vars/main.yml
+            name: component_vars
+        - import_role:
+            name: recovery
+            tasks_from: monitoring_prometheus_etc
+        - import_role:
+            name: recovery
+            tasks_from: monitoring_prometheus_snapshot
-  vars_files:
-    - roles/prometheus/vars/main.yml
 
@@ -20,8 +23,11 @@
   become_method: sudo
   serial: 1
   tasks:
-    - import_role:
-        name: recovery
-        tasks_from: monitoring_grafana_data
-  vars_files:
-    - roles/grafana/vars/main.yml
+    - when: specification.components.monitoring.enabled | default(false)
+      block:
+        - include_vars:
+            file: roles/grafana/vars/main.yml
+            name: component_vars
+        - import_role:
+            name: recovery
+            tasks_from: monitoring_grafana_data
diff --git a/core/src/epicli/data/common/ansible/playbooks/recovery_postgresql.yml b/core/src/epicli/data/common/ansible/playbooks/recovery_postgresql.yml
index 052e9ea1a5..4ab980d712 100644
--- a/core/src/epicli/data/common/ansible/playbooks/recovery_postgresql.yml
+++ b/core/src/epicli/data/common/ansible/playbooks/recovery_postgresql.yml
@@ -6,6 +6,11 @@
   become_method: sudo
   serial: 1
   tasks:
-    - import_role:
-        name: recovery
-        tasks_from: postgresql
+    - when: specification.components.postgresql.enabled | default(false)
+      block:
+        - include_vars:
+            file: roles/postgresql/vars/main.yml
+            name: component_vars
+        - import_role:
+            name: recovery
+            tasks_from: postgresql
diff --git a/core/src/epicli/data/common/ansible/playbooks/recovery_rabbitmq.yml b/core/src/epicli/data/common/ansible/playbooks/recovery_rabbitmq.yml
index c5e0c903e1..799ebe6389 100644
--- a/core/src/epicli/data/common/ansible/playbooks/recovery_rabbitmq.yml
+++ b/core/src/epicli/data/common/ansible/playbooks/recovery_rabbitmq.yml
@@ -6,19 +6,25 @@
   become_method: sudo
   serial: 1
   tasks:
-    - import_role:
-        name: recovery
-        tasks_from: rabbitmq_rabbitmq_etc
-  vars_files:
-    - roles/rabbitmq/vars/main.yml
+    - when: specification.components.rabbitmq.enabled | default(false)
+      block:
+        - include_vars:
+            file: roles/rabbitmq/vars/main.yml
+            name: component_vars
+        - import_role:
+            name: recovery
+            tasks_from: rabbitmq_rabbitmq_etc
 
 - hosts: rabbitmq[0]
   become: true
   become_method: sudo
   serial: 1
   tasks:
-    - import_role:
-        name: recovery
-        tasks_from: rabbitmq_rabbitmq_definitions
-  vars_files:
-    - roles/rabbitmq/vars/main.yml
+    - 
when: specification.components.rabbitmq.enabled | default(false) + block: + - include_vars: + file: roles/rabbitmq/vars/main.yml + name: component_vars + - import_role: + name: recovery + tasks_from: rabbitmq_rabbitmq_definitions diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/backup/defaults/main.yml b/core/src/epicli/data/common/ansible/playbooks/roles/backup/defaults/main.yml index c21c1bcd99..e28a0c933d 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/backup/defaults/main.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/backup/defaults/main.yml @@ -1,6 +1,6 @@ --- backup_dir: /epibackup backup_destination_dir: "{{ backup_dir }}/mounted" -backup_destination_host: "{{ groups.kubernetes_master.0 }}" +backup_destination_host: "{{ resolved_repository_hostname | default(groups.repository.0) }}" elasticsearch_snapshot_repository_name: epiphany elasticsearch_snapshot_repository_location: /var/lib/elasticsearch-snapshots diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/backup/meta/main.yml b/core/src/epicli/data/common/ansible/playbooks/roles/backup/meta/main.yml new file mode 100644 index 0000000000..745ba4d956 --- /dev/null +++ b/core/src/epicli/data/common/ansible/playbooks/roles/backup/meta/main.yml @@ -0,0 +1,3 @@ +--- +dependencies: + - role: preflight_facts diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/common/create_snapshot_archive.yml b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/common/create_snapshot_archive.yml new file mode 100644 index 0000000000..90be85bfd6 --- /dev/null +++ b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/common/create_snapshot_archive.yml @@ -0,0 +1,66 @@ +--- +# Invoke with (example): +#- set_fact: +# snapshot_prefix: "haproxy_etc" +# snapshot_name: "20200526-102034" +# dirs_to_archive: +# - /etc/haproxy/ +# - /etc/ssl/haproxy/ +# files_to_archive: +# - /var/lib/rabbitmq/definitions/definitions-{{ snapshot_name }}.json + +- name: Assert that the snapshot_prefix fact is defined and valid + assert: + that: + - snapshot_prefix is defined + - snapshot_prefix is string + - snapshot_prefix | length > 0 + fail_msg: The snapshot_prefix fact must be defined and must be a non-empty string. + +- name: Assert that the snapshot_name fact is defined and valid + assert: + that: + - snapshot_name is defined + - snapshot_name is string + - snapshot_name | length > 0 + fail_msg: The snapshot_name fact must be defined and must be a non-empty string. + +- name: Reconstruct the paths_to_archive list + set_fact: + paths_to_archive: >- + {{ (dirs_to_archive_corrected + files_to_archive_corrected) | unique }} + vars: + # remove empty strings and make sure each path ends with single / + dirs_to_archive_corrected: >- + {{ dirs_to_archive | default([]) + | map('regex_replace', '//*$', '') + | select + | map('regex_replace', '$', '/') + | list }} + # remove empty strings + files_to_archive_corrected: >- + {{ files_to_archive | default([]) + | select + | list }} + +- name: Assert that the paths_to_archive list has at least one element + assert: + that: + - paths_to_archive | length > 0 + fail_msg: The paths_to_archive list must contain at least one element. 
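+
+# Worked example of the normalization above (values borrowed from the header
+# comment, perturbed slightly to show the corrections): with
+#   dirs_to_archive:  [ /etc/haproxy, /etc/ssl/haproxy// ]
+#   files_to_archive: [ /var/lib/rabbitmq/definitions/definitions-20200526-102034.json ]
+# trailing slashes are normalized, empty strings are dropped, and the result is:
+#   paths_to_archive: [ /etc/haproxy/, /etc/ssl/haproxy/,
+#                       /var/lib/rabbitmq/definitions/definitions-20200526-102034.json ]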
+ +- name: Reconstruct the snapshot_path + set_fact: + snapshot_path: "{{ backup_dir }}/{{ snapshot_prefix }}_{{ snapshot_name }}.tar.gz" + +- name: Ensure backup directory exists + file: + path: "{{ backup_dir }}/" + state: directory + +- name: Create the archive + archive: + dest: "{{ snapshot_path }}" + path: "{{ paths_to_archive }}" + format: gz + force_archive: true diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/common/create_snapshot_checksum.yml b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/common/create_snapshot_checksum.yml new file mode 100644 index 0000000000..4460f815c8 --- /dev/null +++ b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/common/create_snapshot_checksum.yml @@ -0,0 +1,32 @@ +--- +# Invoke with (example): +#- set_fact: +# snapshot_path: "{{ backup_dir }}/{{ snapshot_prefix }}_{{ snapshot_name }}.tar.gz" + +- name: Assert that the snapshot_path fact is defined and valid + assert: + that: + - snapshot_path is defined + - snapshot_path is string + - snapshot_path | length > 0 + fail_msg: The snapshot_path fact must be defined and must be a non-empty string. + +- name: Ensure backup directory exists + file: + path: "{{ backup_dir }}/" + state: directory + +- name: Calculate the checksum + stat: + path: "{{ snapshot_path }}" + get_attributes: false + get_checksum: true + get_mime: false + checksum_algorithm: sha1 + register: stat_checksum + +- name: Save the checksum + copy: + dest: "{{ snapshot_path }}.sha1" + content: | + {{ stat_checksum.stat.checksum }} {{ snapshot_path | basename }} diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/download_via_rsync.yml b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/common/download_via_rsync.yml similarity index 99% rename from core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/download_via_rsync.yml rename to core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/common/download_via_rsync.yml index 4f6558856f..c2c0af1014 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/download_via_rsync.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/common/download_via_rsync.yml @@ -78,9 +78,9 @@ rsh: >- /usr/bin/ssh -S none -i {{ private_key_file.path }} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null loop: "{{ artifacts }}" - + - name: Remove copied artifacts from source - file: + file: path: "{{ item }}" state: absent loop: "{{ artifacts }}" diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/download_via_ssh.yml b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/common/download_via_ssh.yml similarity index 100% rename from core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/download_via_ssh.yml rename to core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/common/download_via_ssh.yml diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/kubernetes.yml b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/kubernetes.yml index 026d4dc61e..40c0141794 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/kubernetes.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/kubernetes.yml @@ -1,4 +1,9 @@ --- +- name: Set helper facts + set_fact: + snapshot_name: >- + {{ ansible_date_time.iso8601_basic_short | replace('T','-') }} + - name: Ensure backup directory exists file: path: "{{ backup_dir 
}}/" @@ -76,38 +81,19 @@ dest: "{{ backup_temp_dir.path }}/" remote_src: true - - name: Get current timestamp - run_once: true - set_fact: - timestamp: "{{ ansible_date_time.iso8601_basic_short }}" - - - name: Create tar.gz archive - archive: - path: "{{ backup_temp_dir.path }}/" - dest: "{{ backup_dir }}/k8s_backup_{{ timestamp }}.tar.gz" - format: gz - - - name: Create checksum file - block: - - name: Calculate checksum - stat: - path: "{{ backup_dir }}/k8s_backup_{{ timestamp }}.tar.gz" - get_attributes: false - get_checksum: true - get_mime: false - checksum_algorithm: sha1 - register: stat_k8s_backup_tar_gz + - name: Create snapshot archive + import_tasks: common/create_snapshot_archive.yml + vars: + snapshot_prefix: "k8s_snapshot" + dirs_to_archive: + - "{{ backup_temp_dir.path }}/" - - name: Save checksum to a file - copy: - dest: "{{ stat_k8s_backup_tar_gz.stat.path }}.sha1" - content: | - {{ stat_k8s_backup_tar_gz.stat.checksum }} {{ stat_k8s_backup_tar_gz.stat.path | basename }} + - name: Create snapshot checksum + import_tasks: common/create_snapshot_checksum.yml - - name: Export artifact list for the download_via_rsync backup copy method - set_fact: + - name: Transfer artifacts via rsync + import_tasks: common/download_via_rsync.yml + vars: artifacts: - - "{{ stat_k8s_backup_tar_gz.stat.path }}" - - "{{ stat_k8s_backup_tar_gz.stat.path }}.sha1" - - - import_tasks: download_via_rsync.yml + - "{{ snapshot_path }}" + - "{{ snapshot_path }}.sha1" diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/load_balancer_haproxy_etc.yml b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/load_balancer_haproxy_etc.yml new file mode 100644 index 0000000000..f9e2c77cba --- /dev/null +++ b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/load_balancer_haproxy_etc.yml @@ -0,0 +1,25 @@ +--- +- name: Set helper facts + set_fact: + snapshot_name: >- + {{ ansible_date_time.iso8601_basic_short | replace('T','-') }} + +- debug: var=snapshot_name + +- name: Create snapshot archive + import_tasks: common/create_snapshot_archive.yml + vars: + snapshot_prefix: "haproxy_etc" + dirs_to_archive: + - /etc/haproxy/ + - /etc/ssl/haproxy/ + +- name: Create snapshot checksum + import_tasks: common/create_snapshot_checksum.yml + +- name: Transfer artifacts via rsync + import_tasks: common/download_via_rsync.yml + vars: + artifacts: + - "{{ snapshot_path }}" + - "{{ snapshot_path }}.sha1" diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/loadbalancer_haproxy_etc.yml b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/loadbalancer_haproxy_etc.yml deleted file mode 100644 index 3b07bc9112..0000000000 --- a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/loadbalancer_haproxy_etc.yml +++ /dev/null @@ -1,53 +0,0 @@ ---- -- name: Set helper facts - set_fact: - snapshot_name: >- - {{ ansible_date_time.iso8601_basic_short | replace('T','-') }} - -- debug: var=snapshot_name - -- name: Create and copy etc archive to backup destination - always: - - name: Delete etc archive (cleanup) - file: - path: "{{ item }}" - state: absent - loop: - - "{{ backup_dir }}/haproxy_etc_{{ snapshot_name }}.tar.gz" - - "{{ backup_dir }}/haproxy_etc_{{ snapshot_name }}.tar.gz.sha1" - - block: - - name: Ensure backup dir exists - file: - path: "{{ backup_dir }}/" - state: directory - - - name: Create etc archive - archive: - dest: "{{ backup_dir }}/haproxy_etc_{{ snapshot_name }}.tar.gz" - path: - - /etc/haproxy/ # keep the / here! 
- - /etc/ssl/haproxy/ # keep the / here! - format: gz - - - name: Calculate checksum from etc archive - stat: - path: "{{ backup_dir }}/haproxy_etc_{{ snapshot_name }}.tar.gz" - get_attributes: false - get_checksum: true - get_mime: false - checksum_algorithm: sha1 - register: stat_etc_archive - - - name: Store etc archive checksum in a file - copy: - dest: "{{ backup_dir }}/haproxy_etc_{{ snapshot_name }}.tar.gz.sha1" - content: | - {{ stat_etc_archive.stat.checksum }} haproxy_etc_{{ snapshot_name }}.tar.gz - - - name: Transfer etc archive via rsync - import_tasks: download_via_rsync.yml - vars: - artifacts: - - "{{ backup_dir }}/haproxy_etc_{{ snapshot_name }}.tar.gz" - - "{{ backup_dir }}/haproxy_etc_{{ snapshot_name }}.tar.gz.sha1" diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/logging_elasticsearch_etc.yml b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/logging_elasticsearch_etc.yml index e5fe2fbae6..b9e2bf79db 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/logging_elasticsearch_etc.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/logging_elasticsearch_etc.yml @@ -1,52 +1,27 @@ --- -- name: Assert that the "snapshot_name" fact is defined and valid +- name: Assert that the snapshot_name fact is defined and valid assert: that: - snapshot_name is defined - snapshot_name is string - snapshot_name | length > 0 - fail_msg: The "snapshot_name" fact must be defined and must be a non-empty string. + fail_msg: The snapshot_name fact must be defined and must be a non-empty string. -- name: Create and copy etc archive to backup destination - always: - - name: Delete etc archive (cleanup) - file: - path: "{{ item }}" - state: absent - loop: - - "{{ backup_dir }}/elasticsearch_etc_{{ snapshot_name }}.tar.gz" - - "{{ backup_dir }}/elasticsearch_etc_{{ snapshot_name }}.tar.gz.sha1" +- debug: var=snapshot_name - block: - - name: Ensure backup dir exists - file: - path: "{{ backup_dir }}/" - state: directory +- name: Create snapshot archive + import_tasks: common/create_snapshot_archive.yml + vars: + snapshot_prefix: "elasticsearch_etc" + dirs_to_archive: + - /etc/elasticsearch/ - - name: Create etc archive - archive: - dest: "{{ backup_dir }}/elasticsearch_etc_{{ snapshot_name }}.tar.gz" - path: /etc/elasticsearch/ # keep the / here! 
- format: gz +- name: Create snapshot checksum + import_tasks: common/create_snapshot_checksum.yml - - name: Calculate checksum from etc archive - stat: - path: "{{ backup_dir }}/elasticsearch_etc_{{ snapshot_name }}.tar.gz" - get_attributes: false - get_checksum: true - get_mime: false - checksum_algorithm: sha1 - register: stat_elasticsearch_etc_archive - - - name: Store etc archive checksum in a file - copy: - dest: "{{ backup_dir }}/elasticsearch_etc_{{ snapshot_name }}.tar.gz.sha1" - content: | - {{ stat_elasticsearch_etc_archive.stat.checksum }} elasticsearch_etc_{{ snapshot_name }}.tar.gz - - - name: Transfer etc archive via rsync - import_tasks: download_via_rsync.yml - vars: - artifacts: - - "{{ backup_dir }}/elasticsearch_etc_{{ snapshot_name }}.tar.gz" - - "{{ backup_dir }}/elasticsearch_etc_{{ snapshot_name }}.tar.gz.sha1" +- name: Transfer artifacts via rsync + import_tasks: common/download_via_rsync.yml + vars: + artifacts: + - "{{ snapshot_path }}" + - "{{ snapshot_path }}.sha1" diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/logging_elasticsearch_snapshot.yml b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/logging_elasticsearch_snapshot.yml index 50719b6858..b6e44e0ada 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/logging_elasticsearch_snapshot.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/logging_elasticsearch_snapshot.yml @@ -66,46 +66,19 @@ loop: >- {{ uri_response.json.snapshots | map(attribute='snapshot') | reject('equalto', snapshot_name) | list }} -- name: Create and copy snapshot archive to backup destination - always: - - name: Delete snapshot archive (cleanup) - file: - path: "{{ item }}" - state: absent - loop: - - "{{ backup_dir }}/elasticsearch_snapshot_{{ snapshot_name }}.tar.gz" - - "{{ backup_dir }}/elasticsearch_snapshot_{{ snapshot_name }}.tar.gz.sha1" - - block: - - name: Ensure backup dir exists - file: - path: "{{ backup_dir }}/" - state: directory - - - name: Create snapshot archive - archive: - dest: "{{ backup_dir }}/elasticsearch_snapshot_{{ snapshot_name }}.tar.gz" - path: "{{ elasticsearch_snapshot_repository_location }}/*" - format: gz - - - name: Calculate checksum from snapshot archive - stat: - path: "{{ backup_dir }}/elasticsearch_snapshot_{{ snapshot_name }}.tar.gz" - get_attributes: false - get_checksum: true - get_mime: false - checksum_algorithm: sha1 - register: stat_snapshot_archive +- name: Create snapshot archive + import_tasks: common/create_snapshot_archive.yml + vars: + snapshot_prefix: "elasticsearch_snapshot" + dirs_to_archive: + - "{{ elasticsearch_snapshot_repository_location }}/" - - name: Store snapshot archive checksum in a file - copy: - dest: "{{ backup_dir }}/elasticsearch_snapshot_{{ snapshot_name }}.tar.gz.sha1" - content: | - {{ stat_snapshot_archive.stat.checksum }} elasticsearch_snapshot_{{ snapshot_name }}.tar.gz +- name: Create snapshot checksum + import_tasks: common/create_snapshot_checksum.yml - - name: Transfer snapshot archive via rsync - import_tasks: download_via_rsync.yml - vars: - artifacts: - - "{{ backup_dir }}/elasticsearch_snapshot_{{ snapshot_name }}.tar.gz" - - "{{ backup_dir }}/elasticsearch_snapshot_{{ snapshot_name }}.tar.gz.sha1" +- name: Transfer artifacts via rsync + import_tasks: common/download_via_rsync.yml + vars: + artifacts: + - "{{ snapshot_path }}" + - "{{ snapshot_path }}.sha1" diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/logging_kibana_etc.yml 
b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/logging_kibana_etc.yml index 911d544fcd..4b774e7d4f 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/logging_kibana_etc.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/logging_kibana_etc.yml @@ -1,52 +1,27 @@ --- -- name: Assert that the "snapshot_name" fact is defined and valid +- name: Assert that the snapshot_name fact is defined and valid assert: that: - snapshot_name is defined - snapshot_name is string - snapshot_name | length > 0 - fail_msg: The "snapshot_name" fact must be defined and must be a non-empty string. + fail_msg: The snapshot_name fact must be defined and must be a non-empty string. -- name: Create and copy etc archive to backup destination - always: - - name: Delete etc archive (cleanup) - file: - path: "{{ item }}" - state: absent - loop: - - "{{ backup_dir }}/kibana_etc_{{ snapshot_name }}.tar.gz" - - "{{ backup_dir }}/kibana_etc_{{ snapshot_name }}.tar.gz.sha1" +- debug: var=snapshot_name - block: - - name: Ensure backup dir exists - file: - path: "{{ backup_dir }}/" - state: directory +- name: Create snapshot archive + import_tasks: common/create_snapshot_archive.yml + vars: + snapshot_prefix: "kibana_etc" + dirs_to_archive: + - /etc/kibana/ - - name: Create etc archive - archive: - dest: "{{ backup_dir }}/kibana_etc_{{ snapshot_name }}.tar.gz" - path: /etc/kibana/ # keep the / here! - format: gz +- name: Create snapshot checksum + import_tasks: common/create_snapshot_checksum.yml - - name: Calculate checksum from etc archive - stat: - path: "{{ backup_dir }}/kibana_etc_{{ snapshot_name }}.tar.gz" - get_attributes: false - get_checksum: true - get_mime: false - checksum_algorithm: sha1 - register: stat_etc_archive - - - name: Store etc archive checksum in a file - copy: - dest: "{{ backup_dir }}/kibana_etc_{{ snapshot_name }}.tar.gz.sha1" - content: | - {{ stat_etc_archive.stat.checksum }} kibana_etc_{{ snapshot_name }}.tar.gz - - - name: Transfer etc archive via rsync - import_tasks: download_via_rsync.yml - vars: - artifacts: - - "{{ backup_dir }}/kibana_etc_{{ snapshot_name }}.tar.gz" - - "{{ backup_dir }}/kibana_etc_{{ snapshot_name }}.tar.gz.sha1" +- name: Transfer artifacts via rsync + import_tasks: common/download_via_rsync.yml + vars: + artifacts: + - "{{ snapshot_path }}" + - "{{ snapshot_path }}.sha1" diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/monitoring_grafana_data.yml b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/monitoring_grafana_data.yml index dfa3c22b20..bb5facc006 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/monitoring_grafana_data.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/monitoring_grafana_data.yml @@ -1,52 +1,27 @@ --- -- name: Assert that the "snapshot_name" fact is defined and valid +- name: Assert that the snapshot_name fact is defined and valid assert: that: - snapshot_name is defined - snapshot_name is string - snapshot_name | length > 0 - fail_msg: The "snapshot_name" fact must be defined and must be a non-empty string. + fail_msg: The snapshot_name fact must be defined and must be a non-empty string. 
-- name: Create and copy data archive to backup destination - always: - - name: Delete data archive (cleanup) - file: - path: "{{ item }}" - state: absent - loop: - - "{{ backup_dir }}/grafana_data_{{ snapshot_name }}.tar.gz" - - "{{ backup_dir }}/grafana_data_{{ snapshot_name }}.tar.gz.sha1" +- debug: var=snapshot_name - block: - - name: Ensure backup dir exists - file: - path: "{{ backup_dir }}/" - state: directory +- name: Create snapshot archive + import_tasks: common/create_snapshot_archive.yml + vars: + snapshot_prefix: "grafana_data" + dirs_to_archive: + - "{{ component_vars.specification.grafana_data_dir }}/" - - name: Create data archive - archive: - dest: "{{ backup_dir }}/grafana_data_{{ snapshot_name }}.tar.gz" - path: "{{ specification.grafana_data_dir }}/" # keep the / here! - format: gz +- name: Create snapshot checksum + import_tasks: common/create_snapshot_checksum.yml - - name: Calculate checksum from data archive - stat: - path: "{{ backup_dir }}/grafana_data_{{ snapshot_name }}.tar.gz" - get_attributes: false - get_checksum: true - get_mime: false - checksum_algorithm: sha1 - register: stat_data_archive - - - name: Store data archive checksum in a file - copy: - dest: "{{ backup_dir }}/grafana_data_{{ snapshot_name }}.tar.gz.sha1" - content: | - {{ stat_data_archive.stat.checksum }} grafana_data_{{ snapshot_name }}.tar.gz - - - name: Transfer data archive via rsync - import_tasks: download_via_rsync.yml - vars: - artifacts: - - "{{ backup_dir }}/grafana_data_{{ snapshot_name }}.tar.gz" - - "{{ backup_dir }}/grafana_data_{{ snapshot_name }}.tar.gz.sha1" +- name: Transfer artifacts via rsync + import_tasks: common/download_via_rsync.yml + vars: + artifacts: + - "{{ snapshot_path }}" + - "{{ snapshot_path }}.sha1" diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/monitoring_prometheus_etc.yml b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/monitoring_prometheus_etc.yml index 75f41d2d27..0eec76e2d8 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/monitoring_prometheus_etc.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/monitoring_prometheus_etc.yml @@ -1,52 +1,27 @@ --- -- name: Assert that the "snapshot_name" fact is defined and valid +- name: Assert that the snapshot_name fact is defined and valid assert: that: - snapshot_name is defined - snapshot_name is string - snapshot_name | length > 0 - fail_msg: The "snapshot_name" fact must be defined and must be a non-empty string. + fail_msg: The snapshot_name fact must be defined and must be a non-empty string. -- name: Create and copy etc archive to backup destination - always: - - name: Delete etc archive (cleanup) - file: - path: "{{ item }}" - state: absent - loop: - - "{{ backup_dir }}/prometheus_etc_{{ snapshot_name }}.tar.gz" - - "{{ backup_dir }}/prometheus_etc_{{ snapshot_name }}.tar.gz.sha1" +- debug: var=snapshot_name - block: - - name: Ensure backup dir exists - file: - path: "{{ backup_dir }}/" - state: directory +- name: Create snapshot archive + import_tasks: common/create_snapshot_archive.yml + vars: + snapshot_prefix: "prometheus_etc" + dirs_to_archive: + - /etc/prometheus/ - - name: Create etc archive - archive: - dest: "{{ backup_dir }}/prometheus_etc_{{ snapshot_name }}.tar.gz" - path: /etc/prometheus/ # keep the / here! 
- format: gz +- name: Create snapshot checksum + import_tasks: common/create_snapshot_checksum.yml - - name: Calculate checksum from etc archive - stat: - path: "{{ backup_dir }}/prometheus_etc_{{ snapshot_name }}.tar.gz" - get_attributes: false - get_checksum: true - get_mime: false - checksum_algorithm: sha1 - register: stat_prometheus_etc_archive - - - name: Store etc archive checksum in a file - copy: - dest: "{{ backup_dir }}/prometheus_etc_{{ snapshot_name }}.tar.gz.sha1" - content: | - {{ stat_prometheus_etc_archive.stat.checksum }} prometheus_etc_{{ snapshot_name }}.tar.gz - - - name: Transfer etc archive via rsync - import_tasks: download_via_rsync.yml - vars: - artifacts: - - "{{ backup_dir }}/prometheus_etc_{{ snapshot_name }}.tar.gz" - - "{{ backup_dir }}/prometheus_etc_{{ snapshot_name }}.tar.gz.sha1" +- name: Transfer artifacts via rsync + import_tasks: common/download_via_rsync.yml + vars: + artifacts: + - "{{ snapshot_path }}" + - "{{ snapshot_path }}.sha1" diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/monitoring_prometheus_snapshot.yml b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/monitoring_prometheus_snapshot.yml index f67baffd6c..3f7a253423 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/monitoring_prometheus_snapshot.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/monitoring_prometheus_snapshot.yml @@ -3,64 +3,50 @@ set_fact: prometheus_endpoint: >- http://{{ ansible_default_ipv4.address }}:9090 + snapshot_name: >- + {{ ansible_date_time.iso8601_basic_short | replace('T','-') }} vars: uri_template: &uri body_format: json +- debug: var=snapshot_name + - name: Trigger snapshot creation uri: <<: *uri url: "{{ prometheus_endpoint }}/api/v1/admin/tsdb/snapshot" method: POST register: uri_response + until: uri_response is success + retries: 12 + delay: 5 -- name: Extract snapshot name +- name: Get the prometheus_snapshot_name set_fact: - snapshot_name: "{{ uri_response.json.data.name }}" - snapshot_directory: "{{ specification.storage.data_directory }}/snapshots" + prometheus_snapshot_name: "{{ uri_response.json.data.name }}" -- debug: var=snapshot_name +- debug: var=prometheus_snapshot_name -- name: Create and copy snapshot archive to backup destination +- name: Create, transfer and cleanup snapshot always: - - name: Delete snapshot archive (cleanup) + - name: Remove snapshot directory (cleanup) file: - path: "{{ item }}" + path: "{{ component_vars.specification.storage.data_directory }}/snapshots/{{ prometheus_snapshot_name }}/" state: absent - loop: - - "{{ backup_dir }}/prometheus_snapshot_{{ snapshot_name }}.tar.gz" - - "{{ backup_dir }}/prometheus_snapshot_{{ snapshot_name }}.tar.gz.sha1" - block: - - name: Ensure backup dir exists - file: - path: "{{ backup_dir }}/" - state: directory - - name: Create snapshot archive - archive: - dest: "{{ backup_dir }}/prometheus_snapshot_{{ snapshot_name }}.tar.gz" - path: "{{ snapshot_directory }}/{{ snapshot_name }}/" # keep the / here! 
- format: gz - - - name: Calculate checksum from snapshot archive - stat: - path: "{{ backup_dir }}/prometheus_snapshot_{{ snapshot_name }}.tar.gz" - get_attributes: false - get_checksum: true - get_mime: false - checksum_algorithm: sha1 - register: stat_snapshot_archive + import_tasks: common/create_snapshot_archive.yml + vars: + snapshot_prefix: "prometheus_snapshot" + dirs_to_archive: + - "{{ component_vars.specification.storage.data_directory }}/snapshots/{{ prometheus_snapshot_name }}/" - - name: Store snapshot archive checksum in a file - copy: - dest: "{{ backup_dir }}/prometheus_snapshot_{{ snapshot_name }}.tar.gz.sha1" - content: | - {{ stat_snapshot_archive.stat.checksum }} prometheus_snapshot_{{ snapshot_name }}.tar.gz + - name: Create snapshot checksum + import_tasks: common/create_snapshot_checksum.yml - - name: Transfer snapshot archive via rsync - import_tasks: download_via_rsync.yml + - name: Transfer artifacts via rsync + import_tasks: common/download_via_rsync.yml vars: artifacts: - - "{{ backup_dir }}/prometheus_snapshot_{{ snapshot_name }}.tar.gz" - - "{{ backup_dir }}/prometheus_snapshot_{{ snapshot_name }}.tar.gz.sha1" + - "{{ snapshot_path }}" + - "{{ snapshot_path }}.sha1" diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/postgresql.yml b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/postgresql.yml index c3c5c590df..0c3f7ec230 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/postgresql.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/postgresql.yml @@ -36,7 +36,8 @@ path: "/var/tmp/postgresql_backup_{{ timestamp }}/" state: absent -- name: Set artifact to copy - set_fact: +- name: Transfer artifacts via rsync + import_tasks: common/download_via_rsync.yml + vars: artifacts: - /var/tmp/postgresql_backup_{{ timestamp }}.tar diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/rabbitmq_rabbitmq_definitions.yml b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/rabbitmq_rabbitmq_definitions.yml index 597540b1b8..47cc4012dd 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/rabbitmq_rabbitmq_definitions.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/rabbitmq_rabbitmq_definitions.yml @@ -21,58 +21,30 @@ creates: /usr/local/bin/rabbitmqadmin executable: /bin/bash -- name: Ensure a folder to hold definitions in exists +- name: Ensure the destination directory for definitions exists file: path: /var/lib/rabbitmq/definitions/ state: directory -- name: Store definitions json file +- name: Save definitions in a json file shell: | rabbitmqadmin export /var/lib/rabbitmq/definitions/definitions-{{ snapshot_name }}.json args: executable: /bin/bash -- name: Create and copy definitions archive to backup destination - always: - - name: Delete definitions archive (cleanup) - file: - path: "{{ item }}" - state: absent - loop: - - "{{ backup_dir }}/rabbitmq_definitions_{{ snapshot_name }}.tar.gz" - - "{{ backup_dir }}/rabbitmq_definitions_{{ snapshot_name }}.tar.gz.sha1" - - block: - - name: Ensure backup dir exists - file: - path: "{{ backup_dir }}/" - state: directory - - - name: Create definitions archive - archive: - dest: "{{ backup_dir }}/rabbitmq_definitions_{{ snapshot_name }}.tar.gz" - path: /var/lib/rabbitmq/definitions/definitions-{{ snapshot_name }}.json - format: gz - force_archive: true - - - name: Calculate checksum from definitions archive - stat: - path: "{{ backup_dir 
}}/rabbitmq_definitions_{{ snapshot_name }}.tar.gz" - get_attributes: false - get_checksum: true - get_mime: false - checksum_algorithm: sha1 - register: stat_rabbitmq_definitions_archive - - - name: Store definitions archive checksum in a file - copy: - dest: "{{ backup_dir }}/rabbitmq_definitions_{{ snapshot_name }}.tar.gz.sha1" - content: | - {{ stat_rabbitmq_definitions_archive.stat.checksum }} rabbitmq_definitions_{{ snapshot_name }}.tar.gz - - - name: Transfer definitions archive via rsync - import_tasks: download_via_rsync.yml - vars: - artifacts: - - "{{ backup_dir }}/rabbitmq_definitions_{{ snapshot_name }}.tar.gz" - - "{{ backup_dir }}/rabbitmq_definitions_{{ snapshot_name }}.tar.gz.sha1" +- name: Create snapshot archive + import_tasks: common/create_snapshot_archive.yml + vars: + snapshot_prefix: "rabbitmq_definitions" + files_to_archive: + - /var/lib/rabbitmq/definitions/definitions-{{ snapshot_name }}.json + +- name: Create snapshot checksum + import_tasks: common/create_snapshot_checksum.yml + +- name: Transfer artifacts via rsync + import_tasks: common/download_via_rsync.yml + vars: + artifacts: + - "{{ snapshot_path }}" + - "{{ snapshot_path }}.sha1" diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/rabbitmq_rabbitmq_etc.yml b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/rabbitmq_rabbitmq_etc.yml index ebee94e0dc..728297b41c 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/rabbitmq_rabbitmq_etc.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/backup/tasks/rabbitmq_rabbitmq_etc.yml @@ -1,52 +1,27 @@ --- -- name: Assert that the "snapshot_name" fact is defined and valid +- name: Assert that the snapshot_name fact is defined and valid assert: that: - snapshot_name is defined - snapshot_name is string - snapshot_name | length > 0 - fail_msg: The "snapshot_name" fact must be defined and must be a non-empty string. + fail_msg: The snapshot_name fact must be defined and must be a non-empty string. -- name: Create and copy etc archive to backup destination - always: - - name: Delete etc archive (cleanup) - file: - path: "{{ item }}" - state: absent - loop: - - "{{ backup_dir }}/rabbitmq_etc_{{ snapshot_name }}.tar.gz" - - "{{ backup_dir }}/rabbitmq_etc_{{ snapshot_name }}.tar.gz.sha1" +- debug: var=snapshot_name - block: - - name: Ensure backup dir exists - file: - path: "{{ backup_dir }}/" - state: directory +- name: Create snapshot archive + import_tasks: common/create_snapshot_archive.yml + vars: + snapshot_prefix: "rabbitmq_etc" + dirs_to_archive: + - /etc/rabbitmq/ - - name: Create etc archive - archive: - dest: "{{ backup_dir }}/rabbitmq_etc_{{ snapshot_name }}.tar.gz" - path: /etc/rabbitmq/ # keep the / here! 
- format: gz +- name: Create snapshot checksum + import_tasks: common/create_snapshot_checksum.yml - - name: Calculate checksum from etc archive - stat: - path: "{{ backup_dir }}/rabbitmq_etc_{{ snapshot_name }}.tar.gz" - get_attributes: false - get_checksum: true - get_mime: false - checksum_algorithm: sha1 - register: stat_rabbitmq_etc_archive - - - name: Store etc archive checksum in a file - copy: - dest: "{{ backup_dir }}/rabbitmq_etc_{{ snapshot_name }}.tar.gz.sha1" - content: | - {{ stat_rabbitmq_etc_archive.stat.checksum }} rabbitmq_etc_{{ snapshot_name }}.tar.gz - - - name: Transfer etc archive via rsync - import_tasks: download_via_rsync.yml - vars: - artifacts: - - "{{ backup_dir }}/rabbitmq_etc_{{ snapshot_name }}.tar.gz" - - "{{ backup_dir }}/rabbitmq_etc_{{ snapshot_name }}.tar.gz.sha1" +- name: Transfer artifacts via rsync + import_tasks: common/download_via_rsync.yml + vars: + artifacts: + - "{{ snapshot_path }}" + - "{{ snapshot_path }}.sha1" diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/defaults/main.yml b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/defaults/main.yml index 23e668a058..80fb354149 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/defaults/main.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/defaults/main.yml @@ -1,6 +1,6 @@ --- recovery_dir: /epibackup recovery_source_dir: "{{ recovery_dir }}/mounted" -recovery_source_host: "{{ groups.kubernetes_master.0 }}" +recovery_source_host: "{{ resolved_repository_hostname | default(groups.repository.0) }}" elasticsearch_snapshot_repository_name: epiphany elasticsearch_snapshot_repository_location: /var/lib/elasticsearch-snapshots diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/meta/main.yml b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/meta/main.yml new file mode 100644 index 0000000000..745ba4d956 --- /dev/null +++ b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/meta/main.yml @@ -0,0 +1,3 @@ +--- +dependencies: + - role: preflight_facts diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/common/clear_directories.yml b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/common/clear_directories.yml new file mode 100644 index 0000000000..9d79918862 --- /dev/null +++ b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/common/clear_directories.yml @@ -0,0 +1,45 @@ +--- +# Invoke with (example): +#- set_fact: +# dirs_to_clear: +# - /etc/haproxy/ +# - /etc/ssl/haproxy/ + +- name: Assert that the dirs_to_clear fact is defined and valid + assert: + that: + - dirs_to_clear is defined + - dirs_to_clear is sequence + - dirs_to_clear | length > 0 + fail_msg: The dirs_to_clear fact must be defined and must be a non-empty list. + +- name: Assert that the dirs_to_clear fact does not contain empty strings + assert: + that: + - (dirs_to_clear | length) == (dirs_to_clear_cleaned | length) + fail_msg: The dirs_to_clear fact must not contain empty strings. 
+  vars:
+    # remove empty strings
+    dirs_to_clear_cleaned: >-
+      {{ dirs_to_clear | select | list }}
+
+- name: Find everything in target directories
+  find:
+    paths: "{{ dirs_to_clear_corrected }}"
+    patterns: "*"
+    file_type: any
+    recurse: false
+  register: find_everything_in_target_directories
+  vars:
+    # make sure each path ends with single /
+    dirs_to_clear_corrected: >-
+      {{ dirs_to_clear | map('regex_replace', '//*$', '')
+                       | map('regex_replace', '$', '/')
+                       | list }}
+
+- name: Remove everything from target directories
+  file:
+    path: "{{ item }}"
+    state: absent
+  loop: >-
+    {{ find_everything_in_target_directories.files | map(attribute='path') | list }}
diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/common/find_snapshot_archive.yml b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/common/find_snapshot_archive.yml
new file mode 100644
index 0000000000..9755e39b66
--- /dev/null
+++ b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/common/find_snapshot_archive.yml
@@ -0,0 +1,56 @@
+---
+# Invoke with (example):
+#- set_fact:
+#    snapshot_prefix: "rabbitmq_etc"
+#    snapshot_name: "20200526-102034"
+
+- name: Assert that the snapshot_prefix fact is defined and valid
+  assert:
+    that:
+      - snapshot_prefix is defined
+      - snapshot_prefix is string
+      - snapshot_prefix | length > 0
+    fail_msg: The snapshot_prefix fact must be defined and must be a non-empty string.
+
+- name: Assert that the snapshot_name fact is defined and valid
+  assert:
+    that:
+      - snapshot_name is defined
+      - snapshot_name is string
+      - snapshot_name | length > 0
+    fail_msg: The snapshot_name fact must be defined and must be a non-empty string.
+
+- debug: var=snapshot_name
+
+- name: Decide what should be the search pattern
+  set_fact:
+    search_pattern: >-
+      {{ (snapshot_name != "latest") | ternary(
+        snapshot_prefix ~ "_" ~ snapshot_name ~ ".tar.gz",
+        snapshot_prefix ~ "_" ~ "*-*" ~ ".tar.gz"
+      ) }}
+
+- debug: var=search_pattern
+
+- name: Find all matching archives
+  delegate_to: "{{ recovery_source_host }}"
+  find:
+    paths: "{{ recovery_source_dir }}/"
+    patterns: "{{ search_pattern }}"
+    file_type: file
+    recurse: false
+  register: find_archives
+
+- name: Assert that there are archives available
+  assert:
+    that: find_archives.matched > 0
+    fail_msg: No matching archives found.
+ +- name: Pick the newest archive (if many) + set_fact: + snapshot_path: >- + {{ find_archives.files | map(attribute='path') | max }} + +- name: Assert that the snapshot_path fact is not an empty string + assert: + that: snapshot_path | length > 0 diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/upload_via_rsync.yml b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/common/upload_via_rsync.yml similarity index 100% rename from core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/upload_via_rsync.yml rename to core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/common/upload_via_rsync.yml diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/upload_via_ssh.yml b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/common/upload_via_ssh.yml similarity index 100% rename from core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/upload_via_ssh.yml rename to core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/common/upload_via_ssh.yml diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/common/verify_snapshot_checksum.yml b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/common/verify_snapshot_checksum.yml new file mode 100644 index 0000000000..d7f55f4a50 --- /dev/null +++ b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/common/verify_snapshot_checksum.yml @@ -0,0 +1,31 @@ +--- +# Invoke with (example): +#- set_fact: +# snapshot_path: "rabbitmq_etc_20200526-102034.tar.gz" + +- name: Assert that the snapshot_path fact is defined and valid + assert: + that: + - snapshot_path is defined + - snapshot_path is string + - snapshot_path | length > 0 + fail_msg: The snapshot_path fact must be defined and must be a non-empty string. + +- name: Slurp checksum from file + slurp: + path: "{{ recovery_dir }}/{{ snapshot_path | basename }}.sha1" + register: slurp_checksum + +- name: Calculate archive checksum + stat: + path: "{{ recovery_dir }}/{{ snapshot_path | basename }}" + get_attributes: false + get_checksum: true + get_mime: false + checksum_algorithm: sha1 + register: stat_archive + +- name: Compare checksums + assert: + that: (slurp_checksum.content | b64decode | trim).startswith(stat_archive.stat.checksum) + fail_msg: Checksums do not match. 
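Taken together, these new common task files form a small pipeline that hands data over through facts: find_snapshot_archive.yml resolves snapshot_prefix plus snapshot_name (an explicit timestamp, or latest for the newest match) into the snapshot_path fact on the recovery source host, upload_via_rsync.yml transfers the archive together with its .sha1 companion into recovery_dir, and verify_snapshot_checksum.yml recomputes the SHA1 and compares it against the slurped checksum file. A minimal sketch of a component task list composed from them (the myservice_etc prefix is hypothetical and not part of this change set):

- name: Find snapshot archive
  import_tasks: common/find_snapshot_archive.yml
  vars:
    snapshot_prefix: "myservice_etc"
    snapshot_name: "latest"

- name: Transfer the archive via rsync
  import_tasks: common/upload_via_rsync.yml
  vars:
    artifacts:
      - "{{ snapshot_path }}"
      - "{{ snapshot_path }}.sha1"

- name: Verify snapshot checksum
  import_tasks: common/verify_snapshot_checksum.yml

The checksum file follows the sha1sum convention written by the backup role, i.e. a line of the form "<sha1-hex>  <archive-file-name>", which is why the comparison uses startswith on the slurped content.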
diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/kubernetes.yml b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/kubernetes.yml
index e776fbafa3..11faa4de1b 100644
--- a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/kubernetes.yml
+++ b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/kubernetes.yml
@@ -1,25 +1,19 @@
 ---
-- name: Find all kubernetes backups
-  delegate_to: "{{ recovery_source_host }}"
-  find:
-    paths: "{{ recovery_source_dir }}/"
-    patterns: "k8s_backup_*.tar.gz"
-    file_type: file
-    recurse: false
-  register: find_kubernetes_snapshots
-
-- name: Provide parameters for the upload_via_rsync restore method
-  set_fact:
-    artifacts:
-      - "{{ newest_kubernetes_snapshot }}"
-      - "{{ newest_kubernetes_snapshot }}.sha1"
+- name: Find snapshot archive
+  import_tasks: common/find_snapshot_archive.yml
   vars:
-    newest_kubernetes_snapshot: >-
-      {{ find_kubernetes_snapshots.files | map(attribute='path') | max }}
+    snapshot_prefix: "k8s_backup"
+    snapshot_name: "{{ specification.components.kubernetes.snapshot_name }}"
 
-- debug: var=artifacts
+- name: Transfer the archive via rsync
+  import_tasks: common/upload_via_rsync.yml
+  vars:
+    artifacts:
+      - "{{ snapshot_path }}"
+      - "{{ snapshot_path }}.sha1"
 
-- import_tasks: upload_via_rsync.yml
+- name: Verify snapshot checksum
+  import_tasks: common/verify_snapshot_checksum.yml
 
 - fail:
     msg: This is a dangerous procedure that is currently disabled because it needs refactoring!
diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/load_balancer_haproxy_etc.yml b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/load_balancer_haproxy_etc.yml
new file mode 100644
index 0000000000..872e6c4534
--- /dev/null
+++ b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/load_balancer_haproxy_etc.yml
@@ -0,0 +1,39 @@
+---
+- name: Find snapshot archive
+  import_tasks: common/find_snapshot_archive.yml
+  vars:
+    snapshot_prefix: "haproxy_etc"
+    snapshot_name: "{{ specification.components.load_balancer.snapshot_name }}"
+
+- name: Transfer the archive via rsync
+  import_tasks: common/upload_via_rsync.yml
+  vars:
+    artifacts:
+      - "{{ snapshot_path }}"
+      - "{{ snapshot_path }}.sha1"
+
+- name: Verify snapshot checksum
+  import_tasks: common/verify_snapshot_checksum.yml
+
+- name: Stop haproxy service
+  systemd:
+    name: haproxy
+    state: stopped
+
+- name: Clear directories
+  import_tasks: common/clear_directories.yml
+  vars:
+    dirs_to_clear:
+      - /etc/haproxy/
+      - /etc/ssl/haproxy/
+
+- name: Extract the archive
+  unarchive:
+    dest: /etc/
+    src: "{{ recovery_dir }}/{{ snapshot_path | basename }}"
+    remote_src: true
+
+- name: Start haproxy service
+  systemd:
+    name: haproxy
+    state: started
diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/loadbalancer_haproxy_etc.yml b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/loadbalancer_haproxy_etc.yml
deleted file mode 100644
index e03b66499d..0000000000
--- a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/loadbalancer_haproxy_etc.yml
+++ /dev/null
@@ -1,78 +0,0 @@
----
-- name: Find all haproxy etc archives
-  delegate_to: "{{ recovery_source_host }}"
-  find:
-    paths: "{{ recovery_source_dir }}/"
-    patterns: "haproxy_etc_*-*.tar.gz"
-    file_type: file
-    recurse: false
-  register: find_etc_archives
-
-- name: Do sanity check if there are any etc archives available
-  assert:
-    that: find_etc_archives.matched > 0
-
fail_msg: No etc archives found. - -- name: Pick the newest etc archive - set_fact: - newest_etc_archive_path: >- - {{ find_etc_archives.files | map(attribute='path') | max }} - -- name: Transfer etc archive via rsync - import_tasks: upload_via_rsync.yml - vars: - artifacts: - - "{{ newest_etc_archive_path }}" - - "{{ newest_etc_archive_path }}.sha1" - -- name: Slurp etc archive checksum from file - slurp: - path: "{{ recovery_dir }}/{{ newest_etc_archive_path | basename }}.sha1" - register: slurp_etc_archive_checksum - -- name: Calculate checksum from etc archive - stat: - path: "{{ recovery_dir }}/{{ newest_etc_archive_path | basename }}" - get_attributes: false - get_checksum: true - get_mime: false - checksum_algorithm: sha1 - register: stat_etc_archive - -- name: Compare etc archive checksums - assert: - that: (slurp_etc_archive_checksum.content | b64decode | trim).startswith(stat_etc_archive.stat.checksum) - fail_msg: Checksums do not match. - -- name: Stop haproxy service - systemd: - name: haproxy - state: stopped - -- name: Find everything in the etc directories - find: - paths: - - /etc/haproxy/ - - /etc/ssh/haproxy/ - patterns: "*" - file_type: any - recurse: false - register: find_everything_in_etc_directory - -- name: Remove all haproxy etc files - file: - path: "{{ item }}" - state: absent - loop: >- - {{ find_everything_in_etc_directory.files | map(attribute='path') | list }} - -- name: Extract etc archive to etc directory - unarchive: - dest: /etc/ - src: "{{ recovery_dir }}/{{ newest_etc_archive_path | basename }}" - remote_src: true - -- name: Start haproxy service - systemd: - name: haproxy - state: started diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/logging_elasticsearch_etc.yml b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/logging_elasticsearch_etc.yml index 44e5425e47..7c81954bf5 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/logging_elasticsearch_etc.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/logging_elasticsearch_etc.yml @@ -1,73 +1,35 @@ --- -- name: Find all elasticsearch etc archives - delegate_to: "{{ recovery_source_host }}" - find: - paths: "{{ recovery_source_dir }}/" - patterns: "elasticsearch_etc_*-*.tar.gz" - file_type: file - recurse: false - register: find_elasticsearch_etc_archives - -- name: Do sanity check if there are any etc archives available - assert: - that: find_elasticsearch_etc_archives.matched > 0 - fail_msg: No etc archives found. 
- -- name: Pick the newest etc archive - set_fact: - newest_etc_archive_path: >- - {{ find_elasticsearch_etc_archives.files | map(attribute='path') | max }} +- name: Find snapshot archive + import_tasks: common/find_snapshot_archive.yml + vars: + snapshot_prefix: "elasticsearch_etc" + snapshot_name: "{{ specification.components.logging.snapshot_name }}" -- name: Transfer etc archive via rsync - import_tasks: upload_via_rsync.yml +- name: Transfer the archive via rsync + import_tasks: common/upload_via_rsync.yml vars: artifacts: - - "{{ newest_etc_archive_path }}" - - "{{ newest_etc_archive_path }}.sha1" - -- name: Slurp etc archive checksum from file - slurp: - path: "{{ recovery_dir }}/{{ newest_etc_archive_path | basename }}.sha1" - register: slurp_etc_archive_checksum + - "{{ snapshot_path }}" + - "{{ snapshot_path }}.sha1" -- name: Calculate checksum from etc archive - stat: - path: "{{ recovery_dir }}/{{ newest_etc_archive_path | basename }}" - get_attributes: false - get_checksum: true - get_mime: false - checksum_algorithm: sha1 - register: stat_etc_archive - -- name: Compare etc archive checksums - assert: - that: (slurp_etc_archive_checksum.content | b64decode | trim).startswith(stat_etc_archive.stat.checksum) - fail_msg: Checksums do not match. +- name: Verify snapshot checksum + import_tasks: common/verify_snapshot_checksum.yml - name: Stop elasticsearch service systemd: name: elasticsearch state: stopped -- name: Find everything in the etc directory - find: - paths: /etc/elasticsearch/ - patterns: "*" - file_type: any - recurse: false - register: find_everything_in_etc_directory - -- name: Remove all elasticsearch etc config files - file: - path: "{{ item }}" - state: absent - loop: >- - {{ find_everything_in_etc_directory.files | map(attribute='path') | list }} +- name: Clear directories + import_tasks: common/clear_directories.yml + vars: + dirs_to_clear: + - /etc/elasticsearch/ -- name: Extract etc archive to etc directory +- name: Extract the archive unarchive: dest: /etc/elasticsearch/ - src: "{{ recovery_dir }}/{{ newest_etc_archive_path | basename }}" + src: "{{ recovery_dir }}/{{ snapshot_path | basename }}" remote_src: true - name: Start elasticsearch service diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/logging_elasticsearch_snapshot.yml b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/logging_elasticsearch_snapshot.yml index 3665fc589b..bb4cb2b524 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/logging_elasticsearch_snapshot.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/logging_elasticsearch_snapshot.yml @@ -20,83 +20,45 @@ retries: 12 delay: 5 -- name: Find all elasticsearch snapshots - delegate_to: "{{ recovery_source_host }}" - find: - paths: "{{ recovery_source_dir }}/" - patterns: "elasticsearch_snapshot_*-*.tar.gz" - file_type: file - recurse: false - register: find_elasticsearch_snapshots - -- name: Do sanity check if there are any snapshots available - assert: - that: find_elasticsearch_snapshots.matched > 0 - fail_msg: No snapshots found. 
- -- name: Pick the newest snapshot - set_fact: - newest_snapshot_path: >- - {{ find_elasticsearch_snapshots.files | map(attribute='path') | max }} +- name: Find snapshot archive + import_tasks: common/find_snapshot_archive.yml + vars: + snapshot_prefix: "elasticsearch_snapshot" + snapshot_name: "{{ specification.components.logging.snapshot_name }}" -- name: Transfer snapshot archive via rsync - import_tasks: upload_via_rsync.yml +- name: Transfer the archive via rsync + import_tasks: common/upload_via_rsync.yml vars: artifacts: - - "{{ newest_snapshot_path }}" - - "{{ newest_snapshot_path }}.sha1" - -- name: Slurp snapshot archive checksum from file - slurp: - path: "{{ recovery_dir }}/{{ newest_snapshot_path | basename }}.sha1" - register: slurp_snapshot_archive_checksum + - "{{ snapshot_path }}" + - "{{ snapshot_path }}.sha1" -- name: Calculate checksum from snapshot archive - stat: - path: "{{ recovery_dir }}/{{ newest_snapshot_path | basename }}" - get_attributes: false - get_checksum: true - get_mime: false - checksum_algorithm: sha1 - register: stat_snapshot_archive +- name: Verify snapshot checksum + import_tasks: common/verify_snapshot_checksum.yml -- name: Compare snapshot archive checksums - assert: - that: (slurp_snapshot_archive_checksum.content | b64decode | trim).startswith(stat_snapshot_archive.stat.checksum) - fail_msg: Checksums do not match. - -- name: Find everything in the snapshot directory - find: - paths: "{{ elasticsearch_snapshot_repository_location }}/" - patterns: "*" - file_type: any - recurse: false - register: find_everything_in_snapshot_directory - -- name: Remove everything from snapshot directory - file: - path: "{{ item }}" - state: absent - loop: >- - {{ find_everything_in_snapshot_directory.files | map(attribute='path') | list }} +- name: Clear directories + import_tasks: common/clear_directories.yml + vars: + dirs_to_clear: + - "{{ elasticsearch_snapshot_repository_location }}/" -- name: Extract snapshot archive to snapshot directory +- name: Extract the archive unarchive: dest: "{{ elasticsearch_snapshot_repository_location }}/" - src: "{{ recovery_dir }}/{{ newest_snapshot_path | basename }}" + src: "{{ recovery_dir }}/{{ snapshot_path | basename }}" remote_src: true -- name: Change snapshot directory owner +- name: Change snapshot directory permissions file: path: "{{ elasticsearch_snapshot_repository_location }}/" owner: elasticsearch group: elasticsearch recurse: true -- name: Extract newest snapshot name +- name: Reconstruct the snapshot_name set_fact: snapshot_name: >- - {{ newest_snapshot_path | basename | regex_replace('^elasticsearch_snapshot_(.*).tar.gz$', '\1') }} + {{ snapshot_path | basename | regex_replace('^elasticsearch_snapshot_(.*).tar.gz$', '\1') }} - debug: var=snapshot_name @@ -106,7 +68,7 @@ url: "{{ elasticsearch_endpoint }}/_all/_close" method: POST -- name: Restore newest snapshot +- name: Restore the snapshot uri: <<: *uri url: "{{ elasticsearch_endpoint }}/_snapshot/{{ elasticsearch_snapshot_repository_name }}/{{ snapshot_name }}/_restore" diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/logging_kibana_etc.yml b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/logging_kibana_etc.yml index 64872019b0..3792303795 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/logging_kibana_etc.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/logging_kibana_etc.yml @@ -1,73 +1,35 @@ --- -- name: Find all kibana etc archives - 
delegate_to: "{{ recovery_source_host }}" - find: - paths: "{{ recovery_source_dir }}/" - patterns: "kibana_etc_*-*.tar.gz" - file_type: file - recurse: false - register: find_etc_archives - -- name: Do sanity check if there are any etc archives available - assert: - that: find_etc_archives.matched > 0 - fail_msg: No etc archives found. - -- name: Pick the newest etc archive - set_fact: - newest_etc_archive_path: >- - {{ find_etc_archives.files | map(attribute='path') | max }} +- name: Find snapshot archive + import_tasks: common/find_snapshot_archive.yml + vars: + snapshot_prefix: "kibana_etc" + snapshot_name: "{{ specification.components.logging.snapshot_name }}" -- name: Transfer etc archive via rsync - import_tasks: upload_via_rsync.yml +- name: Transfer the archive via rsync + import_tasks: common/upload_via_rsync.yml vars: artifacts: - - "{{ newest_etc_archive_path }}" - - "{{ newest_etc_archive_path }}.sha1" - -- name: Slurp etc archive checksum from file - slurp: - path: "{{ recovery_dir }}/{{ newest_etc_archive_path | basename }}.sha1" - register: slurp_etc_archive_checksum + - "{{ snapshot_path }}" + - "{{ snapshot_path }}.sha1" -- name: Calculate checksum from etc archive - stat: - path: "{{ recovery_dir }}/{{ newest_etc_archive_path | basename }}" - get_attributes: false - get_checksum: true - get_mime: false - checksum_algorithm: sha1 - register: stat_etc_archive - -- name: Compare etc archive checksums - assert: - that: (slurp_etc_archive_checksum.content | b64decode | trim).startswith(stat_etc_archive.stat.checksum) - fail_msg: Checksums do not match. +- name: Verify snapshot checksum + import_tasks: common/verify_snapshot_checksum.yml - name: Stop kibana service systemd: name: kibana state: stopped -- name: Find everything in the etc directory - find: - paths: /etc/kibana/ - patterns: "*" - file_type: any - recurse: false - register: find_everything_in_etc_directory - -- name: Remove all kibana etc files - file: - path: "{{ item }}" - state: absent - loop: >- - {{ find_everything_in_etc_directory.files | map(attribute='path') | list }} +- name: Clear directories + import_tasks: common/clear_directories.yml + vars: + dirs_to_clear: + - /etc/kibana/ -- name: Extract etc archive to etc directory +- name: Extract the archive unarchive: dest: /etc/kibana/ - src: "{{ recovery_dir }}/{{ newest_etc_archive_path | basename }}" + src: "{{ recovery_dir }}/{{ snapshot_path | basename }}" remote_src: true - name: Start kibana service diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/monitoring_grafana_data.yml b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/monitoring_grafana_data.yml index 429c94f87d..26e7d9477f 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/monitoring_grafana_data.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/monitoring_grafana_data.yml @@ -1,73 +1,35 @@ --- -- name: Find all grafana data archives - delegate_to: "{{ recovery_source_host }}" - find: - paths: "{{ recovery_source_dir }}/" - patterns: "grafana_data_*-*.tar.gz" - file_type: file - recurse: false - register: find_data_archives - -- name: Do sanity check if there are any data archives available - assert: - that: find_data_archives.matched > 0 - fail_msg: No data archives found. 
- -- name: Pick the newest data archive - set_fact: - newest_data_archive_path: >- - {{ find_data_archives.files | map(attribute='path') | max }} +- name: Find snapshot archive + import_tasks: common/find_snapshot_archive.yml + vars: + snapshot_prefix: "grafana_data" + snapshot_name: "{{ specification.components.monitoring.snapshot_name }}" -- name: Transfer data archive via rsync - import_tasks: upload_via_rsync.yml +- name: Transfer the archive via rsync + import_tasks: common/upload_via_rsync.yml vars: artifacts: - - "{{ newest_data_archive_path }}" - - "{{ newest_data_archive_path }}.sha1" - -- name: Slurp data archive checksum from file - slurp: - path: "{{ recovery_dir }}/{{ newest_data_archive_path | basename }}.sha1" - register: slurp_data_archive_checksum + - "{{ snapshot_path }}" + - "{{ snapshot_path }}.sha1" -- name: Calculate checksum from data archive - stat: - path: "{{ recovery_dir }}/{{ newest_data_archive_path | basename }}" - get_attributes: false - get_checksum: true - get_mime: false - checksum_algorithm: sha1 - register: stat_data_archive - -- name: Compare data archive checksums - assert: - that: (slurp_data_archive_checksum.content | b64decode | trim).startswith(stat_data_archive.stat.checksum) - fail_msg: Checksums do not match. +- name: Verify snapshot checksum + import_tasks: common/verify_snapshot_checksum.yml - name: Stop grafana service systemd: name: grafana-server state: stopped -- name: Find everything in the data directory - find: - paths: "{{ specification.grafana_data_dir }}/" - patterns: "*" - file_type: any - recurse: false - register: find_everything_in_data_directory - -- name: Remove all grafana data files - file: - path: "{{ item }}" - state: absent - loop: >- - {{ find_everything_in_data_directory.files | map(attribute='path') | list }} +- name: Clear directories + import_tasks: common/clear_directories.yml + vars: + dirs_to_clear: + - "{{ component_vars.specification.grafana_data_dir }}/" -- name: Extract data archive to data directory +- name: Extract the archive unarchive: - dest: "{{ specification.grafana_data_dir }}/" - src: "{{ recovery_dir }}/{{ newest_data_archive_path | basename }}" + dest: "{{ component_vars.specification.grafana_data_dir }}/" + src: "{{ recovery_dir }}/{{ snapshot_path | basename }}" remote_src: true - name: Start grafana service diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/monitoring_prometheus_etc.yml b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/monitoring_prometheus_etc.yml index a06a64d84b..30c68c4992 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/monitoring_prometheus_etc.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/monitoring_prometheus_etc.yml @@ -1,73 +1,35 @@ --- -- name: Find all prometheus etc archives - delegate_to: "{{ recovery_source_host }}" - find: - paths: "{{ recovery_source_dir }}/" - patterns: "prometheus_etc_*-*.tar.gz" - file_type: file - recurse: false - register: find_prometheus_etc_archives - -- name: Do sanity check if there are any etc archives available - assert: - that: find_prometheus_etc_archives.matched > 0 - fail_msg: No etc archives found. 
- -- name: Pick the newest etc archive - set_fact: - newest_etc_archive_path: >- - {{ find_prometheus_etc_archives.files | map(attribute='path') | max }} +- name: Find snapshot archive + import_tasks: common/find_snapshot_archive.yml + vars: + snapshot_prefix: "prometheus_etc" + snapshot_name: "{{ specification.components.monitoring.snapshot_name }}" -- name: Transfer etc archive via rsync - import_tasks: upload_via_rsync.yml +- name: Transfer the archive via rsync + import_tasks: common/upload_via_rsync.yml vars: artifacts: - - "{{ newest_etc_archive_path }}" - - "{{ newest_etc_archive_path }}.sha1" - -- name: Slurp etc archive checksum from file - slurp: - path: "{{ recovery_dir }}/{{ newest_etc_archive_path | basename }}.sha1" - register: slurp_etc_archive_checksum + - "{{ snapshot_path }}" + - "{{ snapshot_path }}.sha1" -- name: Calculate checksum from etc archive - stat: - path: "{{ recovery_dir }}/{{ newest_etc_archive_path | basename }}" - get_attributes: false - get_checksum: true - get_mime: false - checksum_algorithm: sha1 - register: stat_etc_archive - -- name: Compare etc archive checksums - assert: - that: (slurp_etc_archive_checksum.content | b64decode | trim).startswith(stat_etc_archive.stat.checksum) - fail_msg: Checksums do not match. +- name: Verify snapshot checksum + import_tasks: common/verify_snapshot_checksum.yml - name: Stop prometheus service systemd: name: prometheus state: stopped -- name: Find everything in the etc directory - find: - paths: /etc/prometheus/ - patterns: "*" - file_type: any - recurse: false - register: find_everything_in_etc_directory - -- name: Remove all prometheus etc config files - file: - path: "{{ item }}" - state: absent - loop: >- - {{ find_everything_in_etc_directory.files | map(attribute='path') | list }} +- name: Clear directories + import_tasks: common/clear_directories.yml + vars: + dirs_to_clear: + - /etc/prometheus/ -- name: Extract etc archive to etc directory +- name: Extract the archive unarchive: dest: /etc/prometheus/ - src: "{{ recovery_dir }}/{{ newest_etc_archive_path | basename }}" + src: "{{ recovery_dir }}/{{ snapshot_path | basename }}" remote_src: true - name: Start prometheus service diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/monitoring_prometheus_snapshot.yml b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/monitoring_prometheus_snapshot.yml index 60e78e06bb..fb693c26e6 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/monitoring_prometheus_snapshot.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/monitoring_prometheus_snapshot.yml @@ -1,73 +1,35 @@ --- -- name: Find all prometheus snapshots - delegate_to: "{{ recovery_source_host }}" - find: - paths: "{{ recovery_source_dir }}/" - patterns: "prometheus_snapshot_*-*.tar.gz" - file_type: file - recurse: false - register: find_prometheus_snapshots - -- name: Do sanity check if there are any snapshots available - assert: - that: find_prometheus_snapshots.matched > 0 - fail_msg: No snapshots found. 
- -- name: Pick the newest snapshot - set_fact: - newest_snapshot_path: >- - {{ find_prometheus_snapshots.files | map(attribute='path') | max }} +- name: Find snapshot archive + import_tasks: common/find_snapshot_archive.yml + vars: + snapshot_prefix: "prometheus_snapshot" + snapshot_name: "{{ specification.components.monitoring.snapshot_name }}" -- name: Transfer snapshot archive via rsync - import_tasks: upload_via_rsync.yml +- name: Transfer the archive via rsync + import_tasks: common/upload_via_rsync.yml vars: artifacts: - - "{{ newest_snapshot_path }}" - - "{{ newest_snapshot_path }}.sha1" - -- name: Slurp snapshot archive checksum from file - slurp: - path: "{{ recovery_dir }}/{{ newest_snapshot_path | basename }}.sha1" - register: slurp_snapshot_archive_checksum + - "{{ snapshot_path }}" + - "{{ snapshot_path }}.sha1" -- name: Calculate checksum from snapshot archive - stat: - path: "{{ recovery_dir }}/{{ newest_snapshot_path | basename }}" - get_attributes: false - get_checksum: true - get_mime: false - checksum_algorithm: sha1 - register: stat_snapshot_archive - -- name: Compare snapshot archive checksums - assert: - that: (slurp_snapshot_archive_checksum.content | b64decode | trim).startswith(stat_snapshot_archive.stat.checksum) - fail_msg: Checksums do not match. +- name: Verify snapshot checksum + import_tasks: common/verify_snapshot_checksum.yml - name: Stop prometheus service systemd: name: prometheus state: stopped -- name: Find everything in the data directory - find: - paths: "{{ specification.storage.data_directory }}/" - patterns: "*" - file_type: any - recurse: false - register: find_everything_in_data_directory - -- name: Remove all prometheus data - file: - path: "{{ item }}" - state: absent - loop: >- - {{ find_everything_in_data_directory.files | map(attribute='path') | list }} +- name: Clear directories + import_tasks: common/clear_directories.yml + vars: + dirs_to_clear: + - "{{ component_vars.specification.storage.data_directory }}/" -- name: Extract snapshot archive to data directory +- name: Extract the archive unarchive: - dest: "{{ specification.storage.data_directory }}/" - src: "{{ recovery_dir }}/{{ newest_snapshot_path | basename }}" + dest: "{{ component_vars.specification.storage.data_directory }}/" + src: "{{ recovery_dir }}/{{ snapshot_path | basename }}" remote_src: true - name: Start prometheus service diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/rabbitmq_rabbitmq_definitions.yml b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/rabbitmq_rabbitmq_definitions.yml index 6bc5dfb6e3..b09da315ee 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/rabbitmq_rabbitmq_definitions.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/rabbitmq_rabbitmq_definitions.yml @@ -1,58 +1,29 @@ --- -- name: Find all rabbitmq etc archives - delegate_to: "{{ recovery_source_host }}" - find: - paths: "{{ recovery_source_dir }}/" - patterns: "rabbitmq_definitions_*-*.tar.gz" - file_type: file - recurse: false - register: find_rabbitmq_definitions_archives - -- name: Do sanity check if there are any etc archives available - assert: - that: find_rabbitmq_definitions_archives.matched > 0 - fail_msg: No etc archives found. 
- -- name: Pick the newest etc archive - set_fact: - newest_definitions_archive_path: >- - {{ find_rabbitmq_definitions_archives.files | map(attribute='path') | max }} +- name: Find snapshot archive + import_tasks: common/find_snapshot_archive.yml + vars: + snapshot_prefix: "rabbitmq_definitions" + snapshot_name: "{{ specification.components.rabbitmq.snapshot_name }}" -- name: Transfer etc archive via rsync - import_tasks: upload_via_rsync.yml +- name: Transfer the archive via rsync + import_tasks: common/upload_via_rsync.yml vars: artifacts: - - "{{ newest_definitions_archive_path }}" - - "{{ newest_definitions_archive_path }}.sha1" - -- name: Slurp etc archive checksum from file - slurp: - path: "{{ recovery_dir }}/{{ newest_definitions_archive_path | basename }}.sha1" - register: slurp_definitions_archive_checksum - -- name: Calculate checksum from etc archive - stat: - path: "{{ recovery_dir }}/{{ newest_definitions_archive_path | basename }}" - get_attributes: false - get_checksum: true - get_mime: false - checksum_algorithm: sha1 - register: stat_definitions_archive + - "{{ snapshot_path }}" + - "{{ snapshot_path }}.sha1" -- name: Compare etc archive checksums - assert: - that: (slurp_definitions_archive_checksum.content | b64decode | trim).startswith(stat_definitions_archive.stat.checksum) - fail_msg: Checksums do not match. +- name: Verify snapshot checksum + import_tasks: common/verify_snapshot_checksum.yml - name: Ensure a folder to hold definitions in exists file: path: /var/lib/rabbitmq/definitions/ state: directory -- name: Extract etc archive to etc directory +- name: Extract the archive unarchive: dest: /var/lib/rabbitmq/definitions/ - src: "{{ recovery_dir }}/{{ newest_definitions_archive_path | basename }}" + src: "{{ recovery_dir }}/{{ snapshot_path | basename }}" remote_src: true - name: Ensure management api is enabled @@ -70,11 +41,15 @@ creates: /usr/local/bin/rabbitmqadmin executable: /bin/bash +- name: Reconstruct the snapshot_name + set_fact: + snapshot_name: >- + {{ snapshot_path | basename | regex_replace('^rabbitmq_definitions_(.*).tar.gz$', '\1') }} + +- debug: var=snapshot_name + - name: Import definitions json file shell: | rabbitmqadmin import /var/lib/rabbitmq/definitions/definitions-{{ snapshot_name }}.json args: executable: /bin/bash - vars: - snapshot_name: >- - {{ newest_definitions_archive_path | basename | regex_replace('^rabbitmq_definitions_(.*).tar.gz$', '\1') }} diff --git a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/rabbitmq_rabbitmq_etc.yml b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/rabbitmq_rabbitmq_etc.yml index bd72cdee5d..b191db3dd2 100644 --- a/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/rabbitmq_rabbitmq_etc.yml +++ b/core/src/epicli/data/common/ansible/playbooks/roles/recovery/tasks/rabbitmq_rabbitmq_etc.yml @@ -1,73 +1,35 @@ --- -- name: Find all rabbitmq etc archives - delegate_to: "{{ recovery_source_host }}" - find: - paths: "{{ recovery_source_dir }}/" - patterns: "rabbitmq_etc_*-*.tar.gz" - file_type: file - recurse: false - register: find_rabbitmq_etc_archives - -- name: Do sanity check if there are any etc archives available - assert: - that: find_rabbitmq_etc_archives.matched > 0 - fail_msg: No etc archives found. 
- -- name: Pick the newest etc archive - set_fact: - newest_etc_archive_path: >- - {{ find_rabbitmq_etc_archives.files | map(attribute='path') | max }} +- name: Find snapshot archive + import_tasks: common/find_snapshot_archive.yml + vars: + snapshot_prefix: "rabbitmq_etc" + snapshot_name: "{{ specification.components.rabbitmq.snapshot_name }}" -- name: Transfer etc archive via rsync - import_tasks: upload_via_rsync.yml +- name: Transfer the archive via rsync + import_tasks: common/upload_via_rsync.yml vars: artifacts: - - "{{ newest_etc_archive_path }}" - - "{{ newest_etc_archive_path }}.sha1" - -- name: Slurp etc archive checksum from file - slurp: - path: "{{ recovery_dir }}/{{ newest_etc_archive_path | basename }}.sha1" - register: slurp_etc_archive_checksum + - "{{ snapshot_path }}" + - "{{ snapshot_path }}.sha1" -- name: Calculate checksum from etc archive - stat: - path: "{{ recovery_dir }}/{{ newest_etc_archive_path | basename }}" - get_attributes: false - get_checksum: true - get_mime: false - checksum_algorithm: sha1 - register: stat_etc_archive - -- name: Compare etc archive checksums - assert: - that: (slurp_etc_archive_checksum.content | b64decode | trim).startswith(stat_etc_archive.stat.checksum) - fail_msg: Checksums do not match. +- name: Verify snapshot checksum + import_tasks: common/verify_snapshot_checksum.yml - name: Stop rabbitmq service systemd: name: rabbitmq-server state: stopped -- name: Find everything in the etc directory - find: - paths: /etc/rabbitmq/ - patterns: "*" - file_type: any - recurse: false - register: find_everything_in_etc_directory - -- name: Remove all rabbitmq etc config files - file: - path: "{{ item }}" - state: absent - loop: >- - {{ find_everything_in_etc_directory.files | map(attribute='path') | list }} +- name: Clear directories + import_tasks: common/clear_directories.yml + vars: + dirs_to_clear: + - /etc/rabbitmq/ -- name: Extract etc archive to etc directory +- name: Extract the archive unarchive: dest: /etc/rabbitmq/ - src: "{{ recovery_dir }}/{{ newest_etc_archive_path | basename }}" + src: "{{ recovery_dir }}/{{ snapshot_path | basename }}" remote_src: true - name: Start rabbitmq service diff --git a/core/src/epicli/data/common/defaults/configuration/backup.yml b/core/src/epicli/data/common/defaults/configuration/backup.yml new file mode 100644 index 0000000000..74b479009d --- /dev/null +++ b/core/src/epicli/data/common/defaults/configuration/backup.yml @@ -0,0 +1,17 @@ +kind: configuration/backup +title: Backup Config +name: default +specification: + components: + kubernetes: + enabled: false + load_balancer: + enabled: false + logging: + enabled: false + monitoring: + enabled: false + postgresql: + enabled: false + rabbitmq: + enabled: false diff --git a/core/src/epicli/data/common/defaults/configuration/feature-mapping.yml b/core/src/epicli/data/common/defaults/configuration/feature-mapping.yml index 71fe44e0d8..bf5463e212 100644 --- a/core/src/epicli/data/common/defaults/configuration/feature-mapping.yml +++ b/core/src/epicli/data/common/defaults/configuration/feature-mapping.yml @@ -48,7 +48,7 @@ specification: - name: haproxy-exporter enabled: yes - name: helm - enabled: yes + enabled: yes - name: vault enabled: yes - name: applications @@ -140,4 +140,3 @@ specification: - node-exporter - filebeat - firewall - diff --git a/core/src/epicli/data/common/defaults/configuration/recovery.yml b/core/src/epicli/data/common/defaults/configuration/recovery.yml new file mode 100644 index 0000000000..a9d531ac60 --- /dev/null +++ 
b/core/src/epicli/data/common/defaults/configuration/recovery.yml
@@ -0,0 +1,23 @@
+kind: configuration/recovery
+title: Recovery Config
+name: default
+specification:
+  components:
+    kubernetes:
+      enabled: false
+      snapshot_name: latest
+    load_balancer:
+      enabled: false
+      snapshot_name: latest
+    logging:
+      enabled: false
+      snapshot_name: latest
+    monitoring:
+      enabled: false
+      snapshot_name: latest
+    postgresql:
+      enabled: false
+      snapshot_name: latest
+    rabbitmq:
+      enabled: false
+      snapshot_name: latest
diff --git a/core/src/epicli/data/common/validation/configuration/backup.yml b/core/src/epicli/data/common/validation/configuration/backup.yml
new file mode 100644
index 0000000000..89807aa970
--- /dev/null
+++ b/core/src/epicli/data/common/validation/configuration/backup.yml
@@ -0,0 +1 @@
+$ref: '#/definitions/unvalidated_specification'
\ No newline at end of file
diff --git a/core/src/epicli/data/common/validation/configuration/recovery.yml b/core/src/epicli/data/common/validation/configuration/recovery.yml
new file mode 100644
index 0000000000..89807aa970
--- /dev/null
+++ b/core/src/epicli/data/common/validation/configuration/recovery.yml
@@ -0,0 +1 @@
+$ref: '#/definitions/unvalidated_specification'
\ No newline at end of file
diff --git a/core/src/epicli/tests/helpers/test_argparse_helpers.py b/core/src/epicli/tests/helpers/test_argparse_helpers.py
new file mode 100644
index 0000000000..464badbd31
--- /dev/null
+++ b/core/src/epicli/tests/helpers/test_argparse_helpers.py
@@ -0,0 +1,66 @@
+import pytest
+
+from cli.helpers.argparse_helpers import get_component_parser, components_to_dict
+from cli.helpers.ObjDict import ObjDict
+
+
+def test_get_component_parser():
+    """Test component parser."""
+
+    component_string = 'aaa , bbb, ,, ,'
+    available_components = {'aaa', 'bbb', 'ccc'}
+
+    parser = get_component_parser(available_components)
+    parsed_components = parser(component_string)
+
+    assert isinstance(parsed_components, set)
+    assert parsed_components != available_components
+    assert parsed_components.issubset(available_components)
+
+
+def test_get_component_parser_special_arguments():
+    """Test component parser (special arguments)."""
+
+    available_components = {'aaa', 'bbb', 'ccc'}
+
+    parser = get_component_parser(available_components)
+
+    parsed_components = parser(', ,, , , all, ,, ,')
+    assert parsed_components == available_components
+
+    parsed_components = parser(', ,, , , none, ,, ,')
+    assert parsed_components == set()
+
+
+def test_get_component_parser_incorrect_value():
+    """Test component parser (incorrect value)."""
+
+    component_string = 'aaa , bbb, ,xxx, ,'
+    available_components = {'aaa', 'bbb', 'ccc'}
+
+    parser = get_component_parser(available_components)
+    with pytest.raises(Exception):
+        parser(component_string)
+
+
+def test_components_to_dict():
+    """Test conversion to {component -> boolean} ObjDict."""
+
+    available_components = {'aaa', 'bbb', 'ccc'}
+    parsed_components = {'bbb'}
+
+    component_dict = components_to_dict(parsed_components, available_components)
+
+    assert component_dict.aaa is False
+    assert component_dict.bbb is True
+    assert component_dict.ccc is False
+
+
+def test_components_to_dict_incorrect_value():
+    """Test conversion to {component -> boolean} ObjDict (incorrect value)."""
+
+    available_components = {'aaa', 'bbb', 'ccc'}
+    parsed_components = {'xxx'}
+
+    with pytest.raises(Exception):
+        components_to_dict(parsed_components, available_components)
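For reference, the new configuration/backup and configuration/recovery defaults ship with every component disabled, and the recovery defaults pin snapshot_name: latest, which common/find_snapshot_archive.yml resolves to the newest matching archive. A user manifest only needs to override the components of interest; a minimal sketch (the logging choice and the timestamp are illustrative only, following the 20200526-102034 format used in the task comments):

kind: configuration/recovery
title: Recovery Config
name: default
specification:
  components:
    logging:
      enabled: true
      snapshot_name: "20200526-102034"

With such a document merged over the defaults, recovery is enabled for logging alone, and the logging tasks receive that snapshot_name via specification.components.logging.snapshot_name, pinning the restore to the archive whose name embeds the timestamp.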