Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Epicli backup/restore improvements and refactor #1299

122 changes: 107 additions & 15 deletions core/src/epicli/cli/engine/PatchEngine.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,35 @@
import os

from cli.helpers.Config import Config
from cli.helpers.Log import Log
from cli.helpers.Step import Step

from cli.helpers.build_saver import get_build_path, get_inventory_path_for_build
from cli.helpers.build_saver import copy_files_recursively, copy_file
from cli.helpers.yaml_helpers import safe_load_all, dump
from cli.helpers.doc_list_helpers import select_single
from cli.helpers.argparse_helpers import components_to_dict

from cli.engine.schema.DefaultMerger import DefaultMerger
from cli.engine.schema.SchemaValidator import SchemaValidator

from cli.engine.ansible.AnsibleCommand import AnsibleCommand
from cli.engine.ansible.AnsibleRunner import AnsibleRunner
from cli.helpers.Config import Config
from cli.helpers.build_saver import copy_files_recursively, copy_file, get_inventory_path_for_build


class PatchEngine(Step):
"""Perform backup and recovery operations."""

def __init__(self, input_data):
    """Initialize engine state from the parsed CLI arguments.

    :param input_data: argparse namespace; reads 'file' (path to the input
        YAML definitions) and 'components' (parsed component set or None).
    """
    super().__init__(__name__)
    self.file = input_data.file
    # None means "use the yaml config"; otherwise the set of components
    # explicitly selected on the command line.
    self.parsed_components = None if input_data.components is None else set(input_data.components)
    self.component_dict = dict()
    self.input_docs = list()
    self.cluster_model = None
    self.backup_doc = None
    self.recovery_doc = None
    # Derived later from the cluster name in _process_input_docs().
    self.build_directory = None
    self.ansible_command = AnsibleCommand()

def __enter__(self):
Expand All @@ -21,29 +39,103 @@ def __enter__(self):
def __exit__(self, exc_type, exc_value, traceback):
super().__exit__(exc_type, exc_value, traceback)

def _process_input_docs(self):
    """Load, merge, validate and cache the user input YAML documents.

    Populates self.input_docs, self.cluster_model, self.backup_doc,
    self.recovery_doc and self.build_directory.

    Raises:
        Exception: when no 'epiphany-cluster' document is present.
    """
    # Load the user input YAML docs from the input file
    if os.path.isabs(self.file):
        path_to_load = self.file
    else:
        path_to_load = os.path.join(os.getcwd(), self.file)
    # Use a context manager so the handle is always closed (the original
    # left the stream open). NOTE(review): assumes safe_load_all
    # materializes the documents before returning — TODO confirm.
    with open(path_to_load, 'r') as user_file_stream:
        self.input_docs = safe_load_all(user_file_stream)

    # Merge the input docs with defaults
    with DefaultMerger(self.input_docs) as doc_merger:
        self.input_docs = doc_merger.run()

    # Get the cluster model
    self.cluster_model = select_single(self.input_docs, lambda x: x.kind == 'epiphany-cluster')
    if self.cluster_model is None:
        raise Exception('No cluster model defined in input YAML file')

    # Validate input documents
    with SchemaValidator(self.cluster_model, self.input_docs) as schema_validator:
        schema_validator.run()

    # Get backup config document
    self.backup_doc = select_single(self.input_docs, lambda x: x.kind == 'configuration/backup')

    # Get recovery config document
    self.recovery_doc = select_single(self.input_docs, lambda x: x.kind == 'configuration/recovery')

    # Derive the build directory path from the cluster name
    self.build_directory = get_build_path(self.cluster_model.specification.name)

def _process_component_config(self, document):
    """Translate CLI-selected components into self.component_dict.

    Leaves self.component_dict untouched when no components were given
    on the command line (self.parsed_components is None).
    """
    if self.parsed_components is None:
        return
    known_components = set(document.specification.components.keys())
    self.component_dict = components_to_dict(self.parsed_components, known_components)

def backup(self):
    """Run the backup playbooks for every selected/enabled component."""

    self._process_input_docs()
    self._process_component_config(self.backup_doc)
    self._update_role_files_and_vars('backup', self.backup_doc)

    # Execute component playbooks sequentially, in deterministic name order.
    components = self.backup_doc.specification.components
    for name, config in sorted(components.items()):
        # Command-line selection (if any) overrides the yaml 'enabled' flags.
        selected = self.component_dict[name] if self.component_dict else config.enabled
        if selected:
            self._update_playbook_files_and_run('backup', name)

    return 0

def recovery(self):
    """Run the recovery playbooks for every selected/enabled component."""

    self._process_input_docs()
    self._process_component_config(self.recovery_doc)
    self._update_role_files_and_vars('recovery', self.recovery_doc)

    # Execute component playbooks sequentially, in deterministic name order.
    components = self.recovery_doc.specification.components
    for name, config in sorted(components.items()):
        # Command-line selection (if any) overrides the yaml 'enabled' flags.
        selected = self.component_dict[name] if self.component_dict else config.enabled
        if selected:
            self._update_playbook_files_and_run('recovery', name)

    return 0

def _update_role_files_and_vars(self, action, document):
    """Stage the '<action>' role in the build directory and render its vars.

    Copies the bundled role files for the given action and dumps the
    backup/recovery configuration document to the role's vars/main.yml so
    the playbooks can read it.

    :param action: operation to perform, 'backup' or 'recovery'
    :param document: the 'configuration/<action>' YAML document
    """
    self.logger.info(f'Updating {action} role files...')

    # Copy role files from the bundled Ansible data into the build dir
    roles_build_path = os.path.join(self.build_directory, 'ansible/roles', action)
    roles_source_path = os.path.join(AnsibleRunner.ANSIBLE_PLAYBOOKS_PATH, 'roles', action)
    copy_files_recursively(roles_source_path, roles_build_path)

    # Render role vars from the configuration document
    vars_dir = os.path.join(roles_build_path, 'vars')
    os.makedirs(vars_dir, exist_ok=True)
    vars_file_path = os.path.join(vars_dir, 'main.yml')
    with open(vars_file_path, 'w') as stream:
        dump(document, stream)

def _update_playbook_files_and_run(self, action, component):
    """Stage and execute the playbook for a single component.

    Copies '<action>_<component>.yml' from the bundled Ansible data into
    the build directory, then runs it against the build's inventory.

    :param action: operation to perform, 'backup' or 'recovery'
    :param component: component name, e.g. 'kubernetes' or 'logging'
    """
    self.logger.info(f'Running {action} on {component}...')

    # Copy the component playbook into the build directory
    playbook_build_path = os.path.join(self.build_directory, 'ansible', f'{action}_{component}.yml')
    playbook_source_path = os.path.join(AnsibleRunner.ANSIBLE_PLAYBOOKS_PATH, f'{action}_{component}.yml')
    copy_file(playbook_source_path, playbook_build_path)

    # Run the playbook against the inventory of the existing build
    inventory_path = get_inventory_path_for_build(self.build_directory)
    self.ansible_command.run_playbook(inventory=inventory_path, playbook_path=playbook_build_path)
64 changes: 23 additions & 41 deletions core/src/epicli/cli/epicli.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
from cli.helpers.query_yes_no import query_yes_no
from cli.helpers.input_query import prompt_for_password
from cli.helpers.build_saver import save_to_file, get_output_path
from cli.helpers.argparse_helpers import get_component_parser
from cli.engine.spec.SpecCommand import SpecCommand


Expand Down Expand Up @@ -262,65 +263,46 @@ def run_validate(args):
'''


def _component_parser_for(available_components=frozenset()):
    """Return an argparse ``type=`` callable parsing a comma-separated component list.

    The returned callable strips whitespace, drops empty items, expands the
    single value 'all' to every available component and rejects unknown names.

    :param available_components: collection of valid component names
        (default changed from a mutable ``{}`` literal to an immutable
        ``frozenset()`` — same empty behavior, no shared-default hazard)
    """
    allowed = set(available_components)

    def parse_components(value):
        parsed_items = {item.strip() for item in value.split(',') if item.strip()}
        if parsed_items == {'all'}:
            return set(allowed)
        invalid = parsed_items - allowed
        if invalid:
            # Name the offending values so the user can fix the command line.
            raise Exception('Error parsing components: invalid values present: '
                            + ', '.join(sorted(invalid)))
        return parsed_items

    return parse_components


def backup_parser(subparsers):
    """Configure and execute backup of cluster components."""

    sub_parser = subparsers.add_parser('backup',
                                       description='Create backup of cluster components.')
    sub_parser.add_argument('-f', '--file', dest='file', type=str,
                            help='File with infrastructure/configuration definitions to use.')

    available_components = {'kubernetes', 'load_balancer', 'logging', 'monitoring', 'postgresql', 'rabbitmq'}

    sub_parser.add_argument('-c', '--components', dest='components', type=get_component_parser(available_components),
                            required=False,
                            help='Specify comma-separated list of components to backup.',
                            default=None)  # "None" indicates that the yaml config will be used

    def run_backup(args):
        experimental_query()
        adjust_paths_from_file(args)
        with PatchEngine(args) as engine:
            return engine.backup()

    sub_parser.set_defaults(func=run_backup)


def recovery_parser(subparsers):
sub_parser = subparsers.add_parser('recovery',
description='[Experimental]: Recover from existing backup.')
sub_parser.add_argument('-b', '--build', dest='build_directory', type=str, required=True,
help='Absolute path to directory with build artifacts.')
"""Configure and execute recovery of cluster components."""

available_components = {'kubernetes', 'loadbalancer', 'logging', 'monitoring', 'postgresql', 'rabbitmq'}
sub_parser = subparsers.add_parser('recovery',
description='Recover from existing backup.')
sub_parser.add_argument('-f', '--file', dest='file', type=str,
help='File with infrastructure/configuration definitions to use.')

enabled_components = set() # disable everything by default
enabled_components_joined = ','.join(sorted(enabled_components))
available_components = {'kubernetes', 'load_balancer', 'logging', 'monitoring', 'postgresql', 'rabbitmq'}

sub_parser.add_argument('-c', '--components', dest='components', type=_component_parser_for(available_components), required=False,
help=f'Specify comma-separated list of components to restore (defaults to "{enabled_components_joined}").',
default=enabled_components_joined)
sub_parser.add_argument('-c', '--components', dest='components', type=get_component_parser(available_components), required=False,
help=f'Specify comma-separated list of components to recover.',
default=None) # "None" indicates that the yaml config will be used

def run_recovery(args):
experimental_query()
adjust_paths_from_build(args)
if not query_yes_no('Do you really want to perform recovery?'):
return 0
adjust_paths_from_file(args)
with PatchEngine(args) as engine:
return engine.recovery()

Expand Down
43 changes: 43 additions & 0 deletions core/src/epicli/cli/helpers/argparse_helpers.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
from cli.helpers.ObjDict import ObjDict


def get_component_parser(available_components):
    """Return an argparse ``type=`` callable parsing a comma-separated component list.

    The returned callable:
    - strips whitespace and ignores empty items,
    - expands the single keyword 'all' to every available component,
    - maps the single keyword 'none' to an empty selection,
    - raises for any name not in *available_components*, naming the
      offending values in the error message.

    :param available_components: collection of valid component names
    """
    allowed = set(available_components)

    def parse_components(value):
        parsed_items = {item.strip() for item in value.split(',') if item.strip()}

        # Keywords are only honoured when given alone, mirroring the CLI contract.
        if parsed_items == {'all'}:
            return set(allowed)
        if parsed_items == {'none'}:
            return set()

        invalid = parsed_items - allowed
        if invalid:
            # Name the offending values so the user can fix the command line.
            raise Exception('Error parsing components: invalid values present: '
                            + ', '.join(sorted(invalid)))

        return parsed_items

    return parse_components


def components_to_dict(parsed_components, available_components):
    """Return an ObjDict mapping every available component to True/False
    depending on whether it appears in *parsed_components*.

    :raises Exception: when *parsed_components* contains unknown names
    """
    selected = frozenset(parsed_components)
    known = frozenset(available_components)

    unknown = selected - known
    if unknown:
        raise Exception('Error parsing components: invalid values present')

    return ObjDict({name: name in selected for name in known})
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@
become_method: sudo
serial: 1
tasks:
- import_role:
name: backup
tasks_from: kubernetes
- when: specification.components.kubernetes.enabled | default(false)
block:
- import_role:
name: backup
tasks_from: kubernetes
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
---
# Ansible playbook for backing up load_balancer config

# Runs only on the first haproxy host; serial: 1 keeps execution sequential.
- hosts: haproxy[0]
  become: true
  become_method: sudo
  serial: 1
  tasks:
    # Skip entirely unless the load_balancer component is enabled in the
    # rendered backup configuration (defaults to disabled).
    - when: specification.components.load_balancer.enabled | default(false)
      block:
        # Expose the haproxy role vars to the backup tasks as 'component_vars'.
        - include_vars:
            file: roles/haproxy/vars/main.yml
            name: component_vars
        - import_role:
            name: backup
            tasks_from: load_balancer_haproxy_etc

This file was deleted.

34 changes: 21 additions & 13 deletions core/src/epicli/data/common/ansible/playbooks/backup_logging.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,23 +7,31 @@
become_method: sudo
serial: 1
tasks:
- import_role:
name: backup
tasks_from: logging_elasticsearch_snapshot
- import_role:
name: backup
tasks_from: logging_elasticsearch_etc
vars_files:
- roles/logging/vars/main.yml
- when: specification.components.logging.enabled | default(false)
block:
- include_vars:
file: roles/logging/vars/main.yml
name: component_vars
- import_role:
name: backup
tasks_from: logging_elasticsearch_snapshot
- import_role:
name: backup
tasks_from: logging_elasticsearch_etc

# Back up Kibana configuration on the first kibana host.
- hosts: kibana[0]
  gather_facts: true
  become: true
  become_method: sudo
  serial: 1
  tasks:
    # Skip unless the logging component is enabled in the backup config.
    - when: specification.components.logging.enabled | default(false)
      block:
        # Expose the kibana role vars to the backup tasks as 'component_vars'.
        - include_vars:
            file: roles/kibana/vars/main.yml
            name: component_vars
        - import_role:
            name: backup
            tasks_from: logging_kibana_etc
          vars:
            # Reuse the snapshot name computed on the first logging host.
            snapshot_name: "{{ hostvars[groups.logging.0].snapshot_name }}"
Loading