diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/command/tar.py b/ansible/playbooks/roles/repository/files/download-requirements/src/command/tar.py
index 5e4be35a67..0a2d6876e1 100644
--- a/ansible/playbooks/roles/repository/files/download-requirements/src/command/tar.py
+++ b/ansible/playbooks/roles/repository/files/download-requirements/src/command/tar.py
@@ -13,66 +13,91 @@ def __init__(self):
         super().__init__('tar', 1)
 
     def pack(self, filename: Path,
-             target: str,
-             directory: Path = None,
-             verbose: bool = False,
+             targets: List[Path],
+
+             # short flags:
              compress: bool = False,
+             verbose: bool = False,
+             preserve: bool = False,
+
+             # long flags:
+             absolute_names: bool = False,
+             directory: Path = None,
              verify: bool = False):
         """
-        Create a tar archive
+        Create a tar archive.
 
         :param filename: name for the archive to be created
-        :param target: files to be archived
-        :param directory: change directory before doing any actions
+        :param targets: files to be archived
+
+        :param compress: use zlib compression
        :param verbose: use verbose mode
-        :param uncompress: use zlib compression
+        :param preserve: preserve file permissions
+
+        :param absolute_names: don't strip leading slashes from file names
+        :param directory: change directory before doing any actions
         :param verify: check file integrity
         """
         short_flags: List[str] = ['-c']  # -czvf flags
         tar_params: List[str] = [str(filename)]  # all the other params
 
+        # short flags:
         if compress:
             short_flags.append('z')
 
         if verbose:
             short_flags.append('v')
 
+        if preserve:
+            short_flags.append('p')
+
         short_flags.append('f')
 
-        if verify:
-            tar_params.append('--verify')
+        # long flags:
+        if absolute_names:
+            tar_params.append('--absolute-names')
 
         if directory is not None:
             tar_params.extend(['--directory', str(directory)])
 
-        if target:
-            tar_params.append(target)
+        if verify:
+            tar_params.append('--verify')
+
+        for target in targets:
+            tar_params.append(str(target))
 
         self.run([''.join(short_flags)] + tar_params)
 
     def unpack(self, filename: Path,
-               target: str = '',
+               target: Path = None,
+
+               # short flags:
+               uncompress: bool = True,
+               verbose: bool = False,
+
+               # long flags:
                absolute_names: bool = False,
                directory: Path = None,
                overwrite: bool = True,
-               strip_components: int = 0,
-               uncompress: bool = True,
-               verbose: bool = False):
         """
-        Unpack a tar archive
+        Unpack a tar archive.
 
         :param filename: file to be extracted
         :param target: name for the output file
+
+        :param uncompress: use zlib compression
+        :param verbose: use verbose mode
+
         :param absolute_names: use abs path names
         :param directory: change directory before doing any actions
         :param overwrite: overwrite existing files when extracting
         :param strip_components: strip leading components from file names on extraction
-        :param uncompress: use zlib compression
-        :param verbose: use verbose mode
         """
         short_flags: List[str] = ['-x']  # -xzvf flags
         tar_params: List[str] = [str(filename)]  # all the other params
 
+        # short flags
         if uncompress:
             short_flags.append('z')
 
@@ -81,6 +106,7 @@ def unpack(self, filename: Path,
 
         short_flags.append('f')
 
+        # long flags
         if absolute_names:
             tar_params.append('--absolute-names')
 
@@ -90,10 +116,10 @@ def unpack(self, filename: Path,
         if strip_components:
             tar_params.append(f'--strip-components={str(strip_components)}')
 
-        if target:
-            tar_params.append(target)
-
         if overwrite:
             tar_params.append('--overwrite')
 
+        if target is not None:
+            tar_params.append(str(target))
+
         self.run([''.join(short_flags)] + tar_params)
diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/config.py b/ansible/playbooks/roles/repository/files/download-requirements/src/config.py
index 7f4540be42..6cd1a64d8a 100644
--- a/ansible/playbooks/roles/repository/files/download-requirements/src/config.py
+++ b/ansible/playbooks/roles/repository/files/download-requirements/src/config.py
@@ -34,7 +34,6 @@ def __init__(self, argv: List[str]):
         self.dest_images: Path
         self.dest_packages: Path
         self.distro_subdir: Path
-        self.enable_backup: bool
         self.is_log_file_enabled: bool
         self.log_file: Path
         self.os_arch: OSArch
@@ -47,6 +46,7 @@ def __init__(self, argv: List[str]):
         self.rerun: bool
         self.retries: int
         self.script_path: Path
+        self.was_backup_created: bool = False
 
         self.__add_args(argv)
 
@@ -70,10 +70,7 @@ def __log_info_summary(self):
         lines.append(f'- grafana dashboards: {str(self.dest_grafana_dashboards)}')
         lines.append(f'- images: {str(self.dest_images)}')
         lines.append(f'- packages: {str(self.dest_packages)}')
-
-        lines.append(f'Enable repos backup: {"Yes" if self.enable_backup else "No"}')
-        if self.enable_backup:
-            lines.append(f'Repos backup file: {str(self.repos_backup_file)}')
+        lines.append(f'Repos backup file: {str(self.repos_backup_file)}')
 
         if self.is_log_file_enabled:
             lines.append(f'Log file location: {str(self.log_file.absolute())}')
@@ -97,8 +94,6 @@ def __create_parser(self) -> ArgumentParser:
                                  'when using `detect`, script will try to find out which OS is being used')
 
         # optional arguments:
-        parser.add_argument('--enable-repos-backup', '-b', action='store_true', dest='enable_backup', default=False,
-                            help=('when used, backup archive for packages will be created and used')),
         parser.add_argument('--repos-backup-file', metavar='BACKUP_FILE', action='store', dest='repos_backup_file',
                             default='/var/tmp/enabled-system-repos.tar', help='path to a backup file')
 
@@ -109,7 +104,7 @@ def __create_parser(self) -> ArgumentParser:
                             default=Path('./download-requirements.log'), help='logs will be saved to this file')
 
         parser.add_argument('--log-level', metavar='LOG_LEVEL', type=str, action='store', dest='log_level',
-                            default='info', help='set up log level, available levels: (error|warn|info|debug`)')
+                            default='info', help='set up log level, available levels: (error|warn|info|debug)')
 
         parser.add_argument('--no-logfile', action='store_true', dest='no_logfile',
                             help='no logfile will be created')
@@ -207,7 +202,6 @@ def __add_args(self, argv: List[str]):
         self.dest_packages = self.dest_dir / 'packages'
 
         # add optional arguments
-        self.enable_backup = args['enable_backup']
         self.os_arch = OSArch(os.uname().machine)
         self.repos_backup_file = Path(args['repos_backup_file'])
         self.retries = args['retries']
diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/mode/base_mode.py b/ansible/playbooks/roles/repository/files/download-requirements/src/mode/base_mode.py
index 611b1e89a4..93b2f5b6b7 100644
--- a/ansible/playbooks/roles/repository/files/download-requirements/src/mode/base_mode.py
+++ b/ansible/playbooks/roles/repository/files/download-requirements/src/mode/base_mode.py
@@ -71,15 +71,15 @@ def __parse_requirements(self) -> Dict[str, Any]:
 
         return reqs
 
-    def _use_backup_repositories(self):
+    def _create_backup_repositories(self):
         """
-        Check if there were any critical issues and if so, try to restore the state using backup
+        Create a backup of package repository files under the /etc directory.
         """
         raise NotImplementedError
 
     def _add_third_party_repositories(self):
         """
-        Add third party repositories for target OS's package manager
+        Add third party repositories for target OS's package manager.
         """
         raise NotImplementedError
 
@@ -91,7 +91,7 @@ def _install_base_packages(self):
 
     def _download_packages(self):
         """
-        Download packages under `self._requirements['packages']` using target OS's package manager
+        Download packages under `self._requirements['packages']` using target OS's package manager.
         """
         raise NotImplementedError
 
@@ -140,7 +140,7 @@ def __download_files(self):
 
     def __download_grafana_dashboards(self):
         """
-        Download grafana dashboards under `self._requirements['grafana-dashboards']`
+        Download grafana dashboards under `self._requirements['grafana-dashboards']`.
         """
         dashboards: Dict[str, Dict] = self._requirements['grafana-dashboards']
         for dashboard in dashboards:
@@ -158,7 +158,7 @@ def __download_grafana_dashboards(self):
 
     def __download_crane(self):
         """
-        Download Crane package if needed and setup it's environment
+        Download Crane package if needed and set up its environment.
         """
         crane_path = self._cfg.dest_dir / 'crane'
         crane_package_path = Path(f'{crane_path}.tar.gz')
@@ -169,7 +169,7 @@ def __download_crane(self):
             logging.debug('crane - checksum ok, skipped')
         else:
             self._download_crane_binary(first_crane, crane_package_path)
-            self._tools.tar.unpack(crane_package_path, 'crane', directory=self._cfg.dest_dir)
+            self._tools.tar.unpack(crane_package_path, Path('crane'), directory=self._cfg.dest_dir)
             chmod(crane_path, 0o0755)
 
             # create symlink to the crane file so that it'll be visible in shell
@@ -180,7 +180,7 @@ def __download_crane(self):
 
     def _download_images(self):
         """
-        Download images under `self._requirements['images']` using Crane
+        Download images under `self._requirements['images']` using Crane.
         """
         platform: str = 'linux/amd64' if self._cfg.os_arch.X86_64 else 'linux/arm64'
         images = self._requirements['images']
@@ -200,10 +200,23 @@ def _download_images(self):
 
     def _cleanup(self):
         """
-        Optional step for cleanup routines
+        Optional step for cleanup routines.
         """
         pass
 
+    def __restore_repositories(self):
+        """
+        Restore the state of repository files under the /etc dir.
+ """ + if self._cfg.repos_backup_file.exists() and self._cfg.repos_backup_file.stat().st_size: + logging.info('Restoring repository files...') + self._tools.tar.unpack(filename=self._cfg.repos_backup_file, + directory=Path('/'), + absolute_names=True, + uncompress=False, + verbose=True) + logging.info('Done restoring repository files.') + def run(self): """ Run target mode. @@ -218,9 +231,10 @@ def run(self): self._cfg.dest_images.mkdir(exist_ok=True, parents=True) self._cfg.dest_packages.mkdir(exist_ok=True, parents=True) - logging.info('Checking backup repositories...') - self._use_backup_repositories() - logging.info('Done checking backup repositories.') + self._create_backup_repositories() + + if not self._cfg.was_backup_created: + self.__restore_repositories() logging.info('Installing base packages...') self._install_base_packages() @@ -259,3 +273,5 @@ def run(self): logging.info('Running cleanup...') self._cleanup() logging.info('Done running cleanup.') + + self.__restore_repositories() diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/mode/debian_family_mode.py b/ansible/playbooks/roles/repository/files/download-requirements/src/mode/debian_family_mode.py index 1dd200362b..26c2394114 100644 --- a/ansible/playbooks/roles/repository/files/download-requirements/src/mode/debian_family_mode.py +++ b/ansible/playbooks/roles/repository/files/download-requirements/src/mode/debian_family_mode.py @@ -22,19 +22,19 @@ def __create_repo_paths(self): for repo in self._repositories.keys(): self._repositories[repo]['path'] = Path('/etc/apt/sources.list.d') / f'{repo}.list' - def _use_backup_repositories(self): - sources = Path('/etc/apt/sources.list') - if not sources.exists() or not sources.stat().st_size: - if self._cfg.repos_backup_file.exists() and self._cfg.enable_backup: - logging.warn('OS repositories seems missing, restoring...') - self._tools.tar.unpack(filename=self._cfg.repos_backup_file, - directory=Path('/'), - absolute_names=True, - uncompress=False, - verbose=True) - else: - logging.warn(f'{str(sources)} seems to be missing, you either know what you are doing or ' - 'you need to fix your repositories') + def _create_backup_repositories(self): + if not self._cfg.repos_backup_file.exists(): + logging.debug('Creating backup for system repositories...') + self._tools.tar.pack(self._cfg.repos_backup_file, + targets=[Path('/etc/apt/sources.list'), + Path('/etc/apt/sources.list.d')], + verbose=True, + preserve=True, + absolute_names=True, + verify=True) + + self._cfg.was_backup_created = True + logging.debug('Done.') def _install_base_packages(self): # install prerequisites which might be missing @@ -47,11 +47,6 @@ def _install_base_packages(self): logging.info(f'- {package}') def _add_third_party_repositories(self): - # backup custom repositories to avoid possible conflicts - for repo_file in Path('/etc/apt/sources.list.d').iterdir(): - if repo_file.name.endswith('.list'): - repo_file.rename(f'{repo_file}.bak') - # add third party keys for repo in self._repositories: data = self._repositories[repo] @@ -125,10 +120,9 @@ def _cleanup(self): if data['path'].exists(): data['path'].unlink() - # restore masked custom repositories to their original names + # removed custom repositories to their original names for repo_file in Path('/etc/apt/sources.list.d').iterdir(): - if repo_file.name.endswith('.bak'): - move(str(repo_file.absolute()), str(repo_file.with_suffix('').absolute())) + repo_file.unlink() # remove installed packages for package in 
self.__installed_packages: diff --git a/ansible/playbooks/roles/repository/files/download-requirements/src/mode/red_hat_family_mode.py b/ansible/playbooks/roles/repository/files/download-requirements/src/mode/red_hat_family_mode.py index 4632d3afcb..24d1031dc0 100644 --- a/ansible/playbooks/roles/repository/files/download-requirements/src/mode/red_hat_family_mode.py +++ b/ansible/playbooks/roles/repository/files/download-requirements/src/mode/red_hat_family_mode.py @@ -19,25 +19,26 @@ def __init__(self, config: Config): self.__base_packages: List[str] = ['yum-utils', 'wget', 'curl', 'tar'] self.__installed_packages: List[str] = [] - def _use_backup_repositories(self): - sources = Path('/etc/yum.repos.d/epirepo.repo') - if sources.exists() and sources.stat().st_size: - if self._cfg.repos_backup_file.exists() and self._cfg.enable_backup: - logging.warn('OS repositories seems missing, restoring...') - self._tools.tar.unpack(filename=self._cfg.repos_backup_file, - directory=Path('/'), - absolute_names=True, - uncompress=False, - verbose=True) - else: - logging.warn(f'{str(sources)} seems to be missing, you either know what you are doing or ' - 'you need to fix your repositories') + def _create_backup_repositories(self): + if not self._cfg.repos_backup_file.exists() or not self._cfg.repos_backup_file.stat().st_size: + logging.debug('Creating backup for system repositories...') + self._tools.tar.pack(self._cfg.repos_backup_file, + [Path('/etc/yum.repos.d/')], + verbose=True, + directory=Path('/'), + verify=True) + + self._cfg.was_backup_created = True + logging.debug('Done.') def _install_base_packages(self): # some packages are from EPEL repo - if not self._tools.rpm.is_package_installed('epel-release'): - self._tools.yum.install('https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm') - self.__installed_packages.append('epel-release') + # make sure that we reinstall it before proceeding + if self._tools.rpm.is_package_installed('epel-release'): + self._tools.yum.remove('epel-release') + + self._tools.yum.install('https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm') + self.__installed_packages.append('epel-release') self.__remove_yum_cache_for_untracked_repos() self._tools.yum.makecache(True) @@ -48,19 +49,11 @@ def _install_base_packages(self): self.__installed_packages.append(package) def __enable_repos(self, repo_id_patterns: List[str]): - """ - :param repo_id_patterns: - """ for repo in self._tools.yum.find_rhel_repo_id(repo_id_patterns): if not self._tools.yum.is_repo_enabled(repo): self._tools.yum_config_manager.enable_repo(repo) def _add_third_party_repositories(self): - # backup custom repositories to avoid possible conflicts - for repo_file in Path('/etc/yum.repos.d/').iterdir(): - if repo_file.name.endswith('.repo'): - shutil.copy(str(repo_file), f'{repo_file}.bak') - # Fix for RHUI client certificate expiration [#2318] if self._tools.yum.is_repo_enabled('rhui-microsoft-azure-rhel'): self._tools.yum.update('rhui-microsoft-azure-rhel*') @@ -202,10 +195,9 @@ def _download_crane_binary(self, url: str, dest: Path): self._tools.wget.download(url, dest, additional_params=False) def _cleanup(self): - # restore repo files + # remove repo files for repo_file in Path('/etc/yum.repos.d').iterdir(): - if repo_file.name.endswith('.bak'): - shutil.move(str(repo_file.absolute()), str(repo_file.with_suffix('').absolute())) + repo_file.unlink() # remove installed packages for package in self.__installed_packages: diff --git 
index 3190dbf7e5..3ed60caf0b 100644
--- a/ansible/playbooks/roles/repository/files/download-requirements/tests/command/test_repoquery.py
+++ b/ansible/playbooks/roles/repository/files/download-requirements/tests/command/test_repoquery.py
@@ -4,17 +4,25 @@
 
 
 def test_interface_query(mocker):
-    ''' Check argument construction for `repoquery` '''
+    ''' Check argument construction for `repoquery` - generic query '''
     with CommandRunMock(mocker, Repoquery(1).query, {'package': 'vim',
                                                      'queryformat': 'some_format',
-                                                     'arch': 'some_arch',
-                                                     'requires': True,
-                                                     'resolve': True}) as call_args:
+                                                     'arch': 'some_arch'}) as call_args:
+        assert call_args == ['repoquery',
+                             '--queryformat',
+                             'some_format',
+                             '--archlist=some_arch,noarch',
+                             'vim']
+
+def test_interface_get_dependencies(mocker):
+    ''' Check argument construction for `repoquery` - dependencies query '''
+    with CommandRunMock(mocker, Repoquery(1).get_dependencies, {'package': 'vim',
+                                                                'queryformat': 'some_format',
+                                                                'arch': 'some_arch'}) as call_args:
         assert call_args == ['repoquery',
                              '--requires',
                              '--resolve',
                              '--queryformat',
                              'some_format',
                              '--archlist=some_arch,noarch',
-                             'vim'
-                             ]
+                             'vim']
diff --git a/ansible/playbooks/roles/repository/files/download-requirements/tests/command/test_tar.py b/ansible/playbooks/roles/repository/files/download-requirements/tests/command/test_tar.py
index 6187aa0c5b..2b7c43f3eb 100644
--- a/ansible/playbooks/roles/repository/files/download-requirements/tests/command/test_tar.py
+++ b/ansible/playbooks/roles/repository/files/download-requirements/tests/command/test_tar.py
@@ -1,29 +1,33 @@
-from tests.mocks.command_run_mock import CommandRunMock
+from pathlib import Path
+
+from tests.mocks.command_run_mock import CommandRunMock
 from src.command.tar import Tar
 
 
 def test_interface_pack(mocker):
     ''' Check argument construction for `tar -cf` '''
     with CommandRunMock(mocker, Tar().pack, {'filename': '/tmp/package.tar.gz',
-                                             'target': '*',
-                                             'directory': '/some/directory',
-                                             'verbose': True,
+                                             'targets': [Path('*')],
                                              'compress': True,
+                                             'verbose': True,
+                                             'preserve': True,
+                                             'absolute_names': True,
+                                             'directory': Path('/some/directory'),
                                              'verify': True}) as call_args:
-        assert call_args == ['tar', '-czvf', '/tmp/package.tar.gz', '--verify', '--directory', '/some/directory', '*']
+        assert call_args == ['tar', '-czvpf', '/tmp/package.tar.gz',
+                             '--absolute-names', '--directory', '/some/directory', '--verify', '*']
 
 
 def test_interface_unpack(mocker):
     ''' Check argument construction for `tar -xf` '''
-    with CommandRunMock(mocker, Tar().unpack, {'filename': '/tmp/package.tar.gz',
-                                               'target': 'some_target',
+    with CommandRunMock(mocker, Tar().unpack, {'filename': Path('/tmp/package.tar.gz'),
+                                               'target': Path('some_target'),
                                                'absolute_names': True,
-                                               'directory': '/some/directory',
+                                               'directory': Path('/some/directory'),
                                                'overwrite': True,
                                                'verbose': True,
                                                'uncompress': True,
                                                'strip_components': 2}) as call_args:
         assert call_args == ['tar', '-xzvf', '/tmp/package.tar.gz', '--absolute-names', '--directory', '/some/directory',
-                             '--strip-components=2', 'some_target', '--overwrite']
+                             '--strip-components=2', '--overwrite', 'some_target']
diff --git a/ansible/playbooks/roles/repository/tasks/download-requirements.yml b/ansible/playbooks/roles/repository/tasks/download-requirements.yml
index 3725546fdd..534f23675f 100644
--- a/ansible/playbooks/roles/repository/tasks/download-requirements.yml
+++ b/ansible/playbooks/roles/repository/tasks/download-requirements.yml
@@ -10,7 +10,6 @@
         "{{ download_requirements_script }}" \
         /var/www/html/epirepo \
         "{{ download_requirements_os_name }}" \
-        --enable-repos-backup \
         --repos-backup-file /var/tmp/enabled-system-repos.tar \
         --no-logfile |&
       tee >(systemd-cat --identifier=download-requirements.py)
diff --git a/cli/src/commands/Prepare.py b/cli/src/commands/Prepare.py
index b05c9ba535..7b085c6698 100644
--- a/cli/src/commands/Prepare.py
+++ b/cli/src/commands/Prepare.py
@@ -39,7 +39,9 @@ def prepare(self) -> int:
         distro_path: Path = arch_path / self.os
 
         dest_path: Path = Path(Config().output_dir)
-        dest_path /= self.output_dir if self.output_dir else 'prepare_scripts'
+        os = self.os.replace('-', '_').replace('.', '')
+        arch = self.arch.replace('-', '_').replace('.', '')
+        dest_path /= self.output_dir if self.output_dir else f'prepare_scripts_{os}_{arch}'
 
         charts_path = dest_path / 'charts/system'
diff --git a/docs/changelogs/CHANGELOG-2.0.md b/docs/changelogs/CHANGELOG-2.0.md
index 22db95ab26..ab96f24d6b 100644
--- a/docs/changelogs/CHANGELOG-2.0.md
+++ b/docs/changelogs/CHANGELOG-2.0.md
@@ -16,6 +16,7 @@
 - [#805](https://github.com/epiphany-platform/epiphany/issues/805) - Refactor download-requirements script
 - [#2858](https://github.com/epiphany-platform/epiphany/issues/2858) - Make Ruby spec tests code compliant with rubocop lint rules
 - [#2975](https://github.com/epiphany-platform/epiphany/issues/2975) - Copy only required files
+- [#2991](https://github.com/epiphany-platform/epiphany/issues/2991) - Add automatic backup creation for download requirements
 
 ### Fixed
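
Usage note (not part of the patch): a minimal sketch of the backup/restore flow introduced above, assuming the module layout from this diff. The calls mirror DebianFamilyMode._create_backup_repositories() and BaseMode.__restore_repositories(); the backup path is the --repos-backup-file default.

    from pathlib import Path

    from src.command.tar import Tar

    tar = Tar()
    backup_file = Path('/var/tmp/enabled-system-repos.tar')  # default of --repos-backup-file

    if not backup_file.exists():
        # roughly: tar -cvpf /var/tmp/enabled-system-repos.tar --absolute-names --verify <targets...>
        tar.pack(backup_file,
                 targets=[Path('/etc/apt/sources.list'), Path('/etc/apt/sources.list.d')],
                 verbose=True, preserve=True, absolute_names=True, verify=True)

    if backup_file.exists() and backup_file.stat().st_size:
        # roughly: tar -xvf /var/tmp/enabled-system-repos.tar --absolute-names --directory / --overwrite
        tar.unpack(filename=backup_file, directory=Path('/'),
                   absolute_names=True, uncompress=False, verbose=True)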