Ensure ca-certificates package is in the latest version (#3169)
* Ensure ca-certificates package is in the latest version

* Add tar to base packages for RHEL mode

* Ensure tar is not uninstalled too early

* Use constants instead of string literals

* Ignore non-critical DNF error

* Ensure `dnf config-manager` command is available

* Do not use constants for better readability

* Ensure epel repo is enabled

* Fix is_repo_enabled method

* Preserve epel-release package

* Remove accidental import

* Apply suggestions from code review

* Apply suggestions from 2nd review

* Fix `The same or higher version of epel-release is already installed` error
to-bar authored Jun 10, 2022
1 parent db0a41b commit 2ced8b8
Showing 10 changed files with 155 additions and 85 deletions.
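
The core of the change is new error handling in the `dnf` wrapper: locale warnings on stderr are ignored, and, when requested, the `The same or higher version ... is already installed, cannot update it.` message no longer aborts an update. Below is a minimal, self-contained sketch of that decision logic; only the two checked substrings and the locale-warning prefix come from the diff, the sample message text is approximated.

def is_fatal(stderr: str, ignore_already_installed_error: bool = False) -> bool:
    # Drop non-critical locale warnings, mirroring DnfBase._filter_non_critical_errors()
    lines = [line for line in stderr.split('\n')
             if not line.startswith('Failed to set locale, defaulting to')]
    filtered = '\n'.join(lines)
    if not filtered:
        return False
    # With the new flag, the "already installed" message from `dnf update` is tolerated
    if (ignore_already_installed_error
            and all(part in filtered for part in ('The same or higher version',
                                                  'is already installed, cannot update it.'))):
        return False
    return True

stderr = ('Failed to set locale, defaulting to C.UTF-8\n'
          'The same or higher version of epel-release is already installed, cannot update it.')
assert is_fatal(stderr) is True
assert is_fatal(stderr, ignore_already_installed_error=True) is False
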
@@ -4,26 +4,40 @@
from src.error import CriticalError


class Dnf(Command):
class DnfBase(Command):
"""
Interface for `dnf`
Base class for `dnf` interfaces
"""

def __init__(self, retries: int):
super().__init__('dnf', retries)

def _filter_non_critical_errors(self, stderr: str) -> str:
output_lines = [line for line in stderr.split('\n')
if not line.startswith('Failed to set locale, defaulting to')]

return '\n'.join(output_lines)


class Dnf(DnfBase):
"""
Interface for `dnf`
"""

def update(self, package: str = None,
disablerepo: str = None,
enablerepo: str = None,
ignore_already_installed_error: bool = False,
releasever: str = None,
assume_yes: bool = True):

"""
Interface for `dnf update`
:param package:
:param disablerepo:
:param enablerepo:
:param ignore_already_installed_error: if set to True,
`The same or higher version of {package} is already installed` error is ignored
:param releasever:
:param assume_yes: if set to True, -y flag will be used
"""
@@ -49,11 +63,18 @@ def update(self, package: str = None,
if 'error' in proc.stdout:
raise CriticalError(
f'Found an error. dnf update failed for package `{package}`, reason: `{proc.stdout}`')
if proc.stderr:

filtered_stderr: str = self._filter_non_critical_errors(proc.stderr)

if filtered_stderr:
if (ignore_already_installed_error
and all(string in filtered_stderr for string in
('The same or higher version', 'is already installed, cannot update it.'))):
return

raise CriticalError(
f'dnf update failed for packages `{package}`, reason: `{proc.stderr}`')


def install(self, package: str,
assume_yes: bool = True):
"""
@@ -72,7 +93,7 @@ def install(self, package: str,
if 'error' in proc.stdout:
raise CriticalError(
f'Found an error. dnf install failed for package `{package}`, reason: `{proc.stdout}`')
if proc.stderr:
if self._filter_non_critical_errors(proc.stderr):
raise CriticalError(
f'dnf install failed for package `{package}`, reason: `{proc.stderr}`')

@@ -87,29 +108,32 @@ def remove(self, package: str,
no_ask: str = '-y' if assume_yes else ''
self.run(['remove', no_ask, package])

def __get_repo_ids(self, repoinfo_extra_args: List[str] = None) -> List[str]:
repoinfo_args: List[str] = ['--quiet', '-y']

if repoinfo_extra_args:
repoinfo_args.extend(repoinfo_extra_args)

output = self.run(['repoinfo'] + repoinfo_args).stdout
repo_ids: List[str] = []

for line in output.splitlines():
if 'Repo-id' in line: # e.g. `Repo-id : epel`
repo_ids.append(line.split(':')[1].strip())

return repo_ids

def is_repo_enabled(self, repo: str) -> bool:
output = self.run(['repolist',
'--enabled',
'--quiet',
'-y']).stdout
if repo in output:
enabled_repos = self.__get_repo_ids()

if repo in enabled_repos:
return True

return False

def find_rhel_repo_id(self, patterns: List[str]) -> List[str]:
output = self.run(['repolist',
'--all',
'--quiet',
'-y']).stdout

repos: List[str] = []
for line in output.split('\n'):
for pattern in patterns:
if pattern in line:
repos.append(pattern)

return repos
def are_repos_enabled(self, repos: List[str]) -> bool:
enabled_repos: List[str] = self.__get_repo_ids()
return all(repo in enabled_repos for repo in repos)

def accept_keys(self):
# to accept import of repo's GPG key (for repo_gpgcheck=1)
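
The repository check now parses repo ids from `dnf repoinfo` instead of substring-matching the whole `dnf repolist` output. A hedged sketch of the parsing that `__get_repo_ids()` and `are_repos_enabled()` rely on; the sample output below is an assumption based on the `Repo-id : epel` example in the code.

sample_repoinfo = (
    'Repo-id            : epel\n'
    'Repo-name          : Extra Packages for Enterprise Linux 8\n'
    'Repo-id            : epel-modular\n'
    'Repo-name          : Extra Packages for Enterprise Linux Modular 8\n'
)

# Keep only the `Repo-id` lines and take the value after the colon
repo_ids = [line.split(':')[1].strip()
            for line in sample_repoinfo.splitlines()
            if 'Repo-id' in line]
assert repo_ids == ['epel', 'epel-modular']

# are_repos_enabled(['epel', 'epel-modular']) then reduces to an exact-id membership test,
# rather than the old `repo in output` substring match over the repolist output
assert all(repo in repo_ids for repo in ['epel', 'epel-modular'])
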
@@ -28,9 +28,6 @@ def get_variable(self, name: str) -> str:
chunks = var.split('=', maxsplit=1)
if name == chunks[0].strip():
value = chunks[1].strip()
break
return value

if not value:
raise DnfVariableNotfound(f'Variable not found: {name}')

return value
raise DnfVariableNotfound(f'Variable not found: {name}')
@@ -1,18 +1,15 @@
from pathlib import Path
from typing import List

from src.command.command import Command
from src.command.dnf import DnfBase
from src.error import CriticalError


class DnfDownload(Command):
class DnfDownload(DnfBase):
"""
Interface for `dnf download`
"""

def __init__(self, retries: int):
super().__init__('dnf', retries)

def download_packages(self, packages: List[str],
archlist: List[str],
destdir: Path,
@@ -38,6 +35,6 @@ def download_packages(self, packages: List[str],
if 'error' in process.stdout:
raise CriticalError(
f'Found an error. dnf download failed for packages `{packages}`, reason: `{process.stdout}`')
if process.stderr:
if self._filter_non_critical_errors(process.stderr):
raise CriticalError(
f'dnf download failed for packages `{packages}`, reason: `{process.stderr}`')
@@ -209,9 +209,15 @@ def _cleanup(self):
"""
pass

def _clean_up_repository_files(self):
def _cleanup_packages(self):
"""
Additional routines before unpacking backup to remove repository files under the /etc directory.
Remove installed packages.
"""
pass

def _remove_repository_files(self):
"""
Additional routines before unpacking backup to remove all repository files under the /etc directory.
"""
pass

@@ -221,7 +227,7 @@ def __restore_repositories(self):
"""
if self._cfg.repos_backup_file.exists() and self._cfg.repos_backup_file.stat().st_size:
logging.info('Restoring repository files...')
self._clean_up_repository_files()
self._remove_repository_files()
self._tools.tar.unpack(filename=self._cfg.repos_backup_file,
directory=Path('/'),
absolute_names=True,
@@ -291,4 +297,9 @@ def run(self):
self._cleanup()
logging.info('Done running cleanup.')

# requires tar but has to be run after cleanup
self.__restore_repositories()

logging.info('Cleaning up installed packages...')
self._cleanup_packages()
logging.info('Done cleaning up installed packages.')
@@ -18,8 +18,8 @@ def __init__(self, config: Config):
self.__installed_packages: List[str] = []

def __create_repo_paths(self):
for repo in self._repositories.keys():
self._repositories[repo]['path'] = Path('/etc/apt/sources.list.d') / f'{repo}.list'
for repo_id, repo_item in self._repositories.items():
repo_item['path'] = Path('/etc/apt/sources.list.d') / f'{repo_id}.list'

def _create_backup_repositories(self):
if not self._cfg.repos_backup_file.exists():
@@ -39,9 +39,12 @@ def _install_base_packages(self):
# install prerequisites which might be missing
installed_packages = self._tools.apt.list_installed_packages()

# Ensure ca-certificates package is in the latest version
self._tools.apt.install('ca-certificates')

for package in ['wget', 'gpg', 'curl', 'tar']:
if package not in installed_packages:
self._tools.apt.install(package, assume_yes=True)
self._tools.apt.install(package)
self.__installed_packages.append(package)
logging.info(f'- {package}')

@@ -123,16 +126,19 @@ def _download_grafana_dashboard(self, dashboard: str, output_file: Path):
def _download_crane_binary(self, url: str, dest: Path):
self._tools.wget.download(url, dest)

def _clean_up_repository_files(self):
for repofile in Path('/etc/apt/sources.list.d').iterdir():
repofile.unlink()
def _remove_repository_files(self):
logging.debug('Removing files from /etc/apt/sources.list.d...')
for repo_file in Path('/etc/apt/sources.list.d').iterdir():
logging.debug(f'- {repo_file.name}')
repo_file.unlink()
logging.debug('Done removing files.')

def _cleanup(self):
# cleaning up 3rd party repositories
for data in self._repositories.values():
if data['path'].exists():
data['path'].unlink()

# remove installed packages
def _cleanup_packages(self):
for package in self.__installed_packages:
self._tools.apt.remove(package)
@@ -19,16 +19,16 @@ def __init__(self, config: Config):
super().__init__(config)
self.__all_queried_packages: Set[str] = set()
self.__archs: List[str] = [config.os_arch.value, 'noarch']
self.__base_packages: List[str] = ['curl', 'python3-dnf-plugins-core', 'wget']
self.__base_packages: List[str] = ['curl', 'python3-dnf-plugins-core', 'wget', 'tar']
self.__dnf_cache_dir: Path = Path('/var/cache/dnf')
self.__installed_packages: List[str] = []
self.__dnf_cache_path: Path = Path('/var/cache/dnf')

try:
dnf_config = configparser.ConfigParser()
with Path('/etc/dnf/dnf.conf').open() as dnf_config_file:
with Path('/etc/dnf/dnf.conf').open(encoding='utf-8') as dnf_config_file:
dnf_config.read(dnf_config_file)

self.__dnf_cache_path = Path(dnf_config['main']['cachedir'])
self.__dnf_cache_dir = Path(dnf_config['main']['cachedir'])
except FileNotFoundError:
logging.debug('RedHatFamilyMode.__init__(): dnf config file not found')
except configparser.Error as e:
@@ -50,26 +50,36 @@ def _create_backup_repositories(self):
logging.debug('Done.')

def _install_base_packages(self):
# Ensure `dnf config-manager` command
if not self._tools.rpm.is_package_installed('dnf-plugins-core'):
self._tools.dnf.install('dnf-plugins-core')
self.__installed_packages.append('dnf-plugins-core')

# Bug in RHEL 8.4 https://bugzilla.redhat.com/show_bug.cgi?id=2004853
releasever = '8' if self._tools.dnf_config_manager.get_variable('releasever') == '8.4' else None
self._tools.dnf.update(package='libmodulemd', releasever=releasever)

# some packages are from EPEL repo
# make sure that we reinstall it before proceeding
if self._tools.rpm.is_package_installed('epel-release'):
if not self._tools.dnf.is_repo_enabled('epel') or not self._tools.dnf.is_repo_enabled('epel-modular'):
self._tools.dnf.remove('epel-release')
# epel-release package is re-installed when repo it provides is not enabled
epel_package_initially_present: bool = self._tools.rpm.is_package_installed('epel-release')

self._tools.dnf.install('https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm')
self.__installed_packages.append('epel-release')
if epel_package_initially_present and not self._tools.dnf.are_repos_enabled(['epel', 'epel-modular']):
self._tools.dnf.remove('epel-release')

# some packages are from EPEL repo, ensure the latest version
if not self._tools.rpm.is_package_installed('epel-release'):
self._tools.dnf.install('https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm')

if not epel_package_initially_present:
self.__installed_packages.append('epel-release')
else:
self._tools.dnf.update('https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm',
ignore_already_installed_error=True)

self.__remove_dnf_cache_for_custom_repos()
self._tools.dnf.makecache(True)
self._tools.dnf.makecache(timer=True)

# tar does not come by default from image. We install it, but don't want to remove it
if not self._tools.rpm.is_package_installed('tar'):
self._tools.dnf.install('tar')
# Ensure ca-certificates package is in the latest version
self._tools.dnf.install('ca-certificates')

for package in self.__base_packages:
if not self._tools.rpm.is_package_installed(package):
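
The reworked EPEL handling in `_install_base_packages()` boils down to three cases. A minimal truth-table sketch of that branching follows; the return strings are informal labels, not code from the diff.

def epel_action(initially_present: bool, epel_repos_enabled: bool) -> str:
    if initially_present and not epel_repos_enabled:
        # Re-install to re-enable the epel/epel-modular repos,
        # but do not schedule removal (epel-release is preserved)
        return 'remove, then reinstall from URL'
    if not initially_present:
        return 'install from URL, remove again during cleanup'
    return 'update with ignore_already_installed_error=True'

assert epel_action(True, False) == 'remove, then reinstall from URL'
assert epel_action(False, False) == 'install from URL, remove again during cleanup'
assert epel_action(True, True) == 'update with ignore_already_installed_error=True'
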
@@ -115,22 +125,33 @@ def _add_third_party_repositories(self):

def __remove_dnf_cache_for_custom_repos(self):
# clean metadata for upgrades (when the same package can be downloaded from changed repo)
repocaches: List[str] = list(self.__dnf_cache_path.iterdir())
cache_paths: List[Path] = list(self.__dnf_cache_dir.iterdir())

def get_matched_paths(repo_id: str, paths: List[Path]) -> List[Path]:
return [path for path in paths if path.name.startswith(repo_id)]

id_names = [
repo_ids = [
'2ndquadrant',
'docker-ce',
'epel',
] + [self._repositories[key]['id'] for key in self._repositories.keys()]
] + [repo['id'] for repo in self._repositories.values()]

matched_cache_paths: List[Path] = []

for repo_id in repo_ids:
matched_cache_paths.extend(get_matched_paths(repo_id, cache_paths))

for repocache in repocaches:
matched_ids = [repocache.name.startswith(repo_name) for repo_name in id_names]
if any(matched_ids):
if matched_cache_paths:
matched_cache_paths.sort()
logging.debug(f'Removing DNF cache files from {self.__dnf_cache_dir}...')

for path in matched_cache_paths:
logging.debug(f'- {path.name}')
try:
if repocache.is_dir():
shutil.rmtree(str(repocache))
if path.is_dir():
shutil.rmtree(str(path))
else:
repocache.unlink()
path.unlink()
except FileNotFoundError:
logging.debug('__remove_dnf_cache_for_custom_repos: cache directory already removed')
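
For illustration, the new cache matching selects a DNF cache entry for removal when its name starts with one of the known repo ids. A small runnable sketch, with made-up cache entry names:

from pathlib import Path

cache_paths = [Path('/var/cache/dnf/epel-0b4cc238d1aa4ffb'),
               Path('/var/cache/dnf/docker-ce-f1a2b3c4d5e6f7a8'),
               Path('/var/cache/dnf/baseos-1234567890abcdef')]
repo_ids = ['2ndquadrant', 'docker-ce', 'epel']

matched = [path for repo_id in repo_ids
           for path in cache_paths if path.name.startswith(repo_id)]

assert sorted(path.name for path in matched) == ['docker-ce-f1a2b3c4d5e6f7a8',
                                                 'epel-0b4cc238d1aa4ffb']
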

@@ -216,14 +237,17 @@ def _download_grafana_dashboard(self, dashboard: str, output_file: Path):
def _download_crane_binary(self, url: str, dest: Path):
self._tools.wget.download(url, dest, additional_params=False)

def _clean_up_repository_files(self):
for repofile in Path('/etc/yum.repos.d').iterdir():
repofile.unlink()
def _remove_repository_files(self):
logging.debug('Removing files from /etc/yum.repos.d...')
for repo_file in Path('/etc/yum.repos.d').iterdir():
logging.debug(f'- {repo_file.name}')
repo_file.unlink()
logging.debug('Done removing files.')

def _cleanup(self):
# remove installed packages
self.__remove_dnf_cache_for_custom_repos()

def _cleanup_packages(self):
for package in self.__installed_packages:
if self._tools.rpm.is_package_installed(package):
self._tools.dnf.remove(package)

self.__remove_dnf_cache_for_custom_repos()
(Diffs for the remaining changed files are not shown here.)
