diff --git a/changelogs/fragments/546-constraints.yml b/changelogs/fragments/546-constraints.yml
new file mode 100644
index 00000000..d97b8e08
--- /dev/null
+++ b/changelogs/fragments/546-constraints.yml
@@ -0,0 +1,2 @@
+minor_changes:
+  - "Support a constraints file that allows fixing dependencies for the ``new-ansible`` and ``prepare`` subcommands (https://github.com/ansible-community/antsibull/pull/546)."
diff --git a/src/antsibull/build_ansible_commands.py b/src/antsibull/build_ansible_commands.py
index cb5ae586..f3e7a85c 100644
--- a/src/antsibull/build_ansible_commands.py
+++ b/src/antsibull/build_ansible_commands.py
@@ -43,6 +43,7 @@
 from .dep_closure import check_collection_dependencies
 from .tagging import get_collections_tags
 from .utils.get_pkg_data import get_antsibull_data
+from .versions import feature_freeze_version, load_constraints_if_exists
 
 if TYPE_CHECKING:
     from _typeshed import StrPath
@@ -89,6 +90,7 @@ async def get_collection_and_core_versions(
     ansible_core_version: PypiVer | None,
     galaxy_url: str,
     ansible_core_allow_prerelease: bool = False,
+    constraints: dict[str, SemVerSpec] | None = None,
 ) -> tuple[dict[str, SemVer], PypiVer | None]:
     """
     Retrieve the latest version of each collection.
@@ -101,6 +103,9 @@
     :returns: Tuple consisting of a dict mapping collection name to latest version, and of
         the ansible-core version if it was provided.
     """
+    if constraints is None:
+        constraints = {}
+
     requestors = {}
     async with aiohttp.ClientSession() as aio_session:
         lib_ctx = app_context.lib_ctx.get()
@@ -109,7 +114,10 @@
         for collection_name, version_spec in deps.items():
             requestors[collection_name] = await pool.spawn(
                 client.get_latest_matching_version(
-                    collection_name, version_spec, pre=True
+                    collection_name,
+                    version_spec,
+                    pre=True,
+                    constraint=constraints.get(collection_name),
                 )
             )
         if ansible_core_version:
@@ -421,111 +429,6 @@ def _extract_python_requires(
     )
 
 
-class _FeatureFreezeVersion:
-    """
-    Helper for making semantic version range specification valid for feature freeze.
-    """
-
-    def __init__(self, spec: str, collection_name: str):
-        self.potential_clauses: list = []
-        self.spec = spec
-        self.collection_name = collection_name
-        self.upper_operator: str | None = None
-        self.upper_version: SemVer | None = None
-        self.min_version: SemVer | None = None
-        self.pinned = False
-
-        spec_obj = SemVerSpec(spec)
-
-        # If there is a single clause, it's available as spec_obj.clause;
-        # multiple ones are available as spec_obj.clause.clauses.
-        try:
-            clauses = spec_obj.clause.clauses
-        except AttributeError:
-            clauses = [spec_obj.clause]
-
-        self.clauses = clauses
-        for clause in clauses:
-            self._process_clause(clause)
-
-    def _process_clause(self, clause) -> None:
-        """
-        Process one clause of the version range specification.
- """ - if clause.operator in ("<", "<="): - if self.upper_operator is not None: - raise ValueError( - f"Multiple upper version limits specified for {self.collection_name}:" - f" {self.spec}" - ) - self.upper_operator = clause.operator - self.upper_version = clause.target - # Omit the upper bound as we're replacing it - return - - if clause.operator == ">=": - # Save the lower bound so we can write out a new compatible version - if self.min_version is not None: - raise ValueError( - f"Multiple minimum versions specified for {self.collection_name}: {self.spec}" - ) - self.min_version = clause.target - - if clause.operator == ">": - raise ValueError( - f"Strict lower bound specified for {self.collection_name}: {self.spec}" - ) - - if clause.operator == "==": - self.pinned = True - - self.potential_clauses.append(clause) - - def compute_new_spec(self) -> str: - """ - Compute a new version range specification that only allows newer patch releases that also - match the original range specification. - """ - if self.pinned: - if len(self.clauses) > 1: - raise ValueError( - f"Pin combined with other clauses for {self.collection_name}: {self.spec}" - ) - return self.spec - - upper_operator = self.upper_operator - upper_version = self.upper_version - if upper_operator is None or upper_version is None: - raise ValueError( - f"No upper version limit specified for {self.collection_name}: {self.spec}" - ) - - min_version = self.min_version - if min_version is None: - raise ValueError( - f"No minimum version specified for {self.collection_name}: {self.spec}" - ) - - if min_version.next_minor() <= upper_version: - upper_operator = "<" - upper_version = min_version.next_minor() - - new_clauses = sorted( - str(clause) - for clause in self.potential_clauses - if clause.target < upper_version - ) - new_clauses.append(f"{upper_operator}{upper_version}") - return ",".join(new_clauses) - - -def feature_freeze_version(spec: str, collection_name: str) -> str: - """ - Make semantic version range specification valid for feature freeze. - """ - return _FeatureFreezeVersion(spec, collection_name).compute_new_spec() - - def prepare_command() -> int: app_ctx = app_context.app_ctx.get() @@ -537,6 +440,11 @@ def prepare_command() -> int: ansible_core_version_obj = PypiVer(ansible_core_version) python_requires = _extract_python_requires(ansible_core_version_obj, deps) + constraints_filename = os.path.join( + app_ctx.extra["data_dir"], app_ctx.extra["constraints_file"] + ) + constraints = load_constraints_if_exists(constraints_filename) + # If we're building a feature frozen release (betas and rcs) then we need to # change the upper version limit to not include new features. 
     if app_ctx.extra["feature_frozen"]:
@@ -550,6 +458,7 @@ def prepare_command() -> int:
             ansible_core_version_obj,
             app_ctx.galaxy_url,
             ansible_core_allow_prerelease=_is_alpha(app_ctx.extra["ansible_version"]),
+            constraints=constraints,
         )
     )
     if new_ansible_core_version:
diff --git a/src/antsibull/cli/antsibull_build.py b/src/antsibull/cli/antsibull_build.py
index 678ac221..adb90f4e 100644
--- a/src/antsibull/cli/antsibull_build.py
+++ b/src/antsibull/cli/antsibull_build.py
@@ -120,15 +120,19 @@ def _normalize_new_release_options(args: argparse.Namespace) -> None:
             " per line"
         )
 
+    compat_version_part = (
+        f"{args.ansible_version.major}"
+        if args.ansible_version.major > 2
+        else f"{args.ansible_version.major}.{args.ansible_version.minor}"
+    )
+
     if args.build_file is None:
         basename = os.path.basename(os.path.splitext(args.pieces_file)[0])
-        if args.ansible_version.major > 2:
-            args.build_file = f"{basename}-{args.ansible_version.major}.build"
-        else:
-            args.build_file = (
-                f"{basename}-{args.ansible_version.major}"
-                f".{args.ansible_version.minor}.build"
-            )
+        args.build_file = f"{basename}-{compat_version_part}.build"
+
+    if args.constraints_file is None:
+        basename = os.path.basename(os.path.splitext(args.pieces_file)[0])
+        args.constraints_file = f"{basename}-{compat_version_part}.constraints"
 
 
 def _check_release_build_directories(args: argparse.Namespace) -> None:
@@ -170,6 +174,10 @@ def _normalize_release_build_options(args: argparse.Namespace) -> None:
             " of versions per line"
         )
 
+    if args.constraints_file is None:
+        basename = os.path.basename(os.path.splitext(args.build_file)[0])
+        args.constraints_file = f"{basename}.constraints"
+
     if args.deps_file is None:
         version_suffix = f"-{compat_version_part}"
         basename = os.path.basename(os.path.splitext(args.build_file)[0])
@@ -296,6 +304,14 @@ def parse_args(program_name: str, args: list[str]) -> argparse.Namespace:
         " --build-data-dir. The default is"
         " $BASENAME_OF_BUILD_FILE-X.Y.Z.deps",
     )
+    build_step_parser.add_argument(
+        "--constraints-file",
+        default=None,
+        help="File containing a list of constraints for collections"
+        " included in Ansible. This is considered to be relative to"
+        " --build-data-dir. The default is"
+        " $BASENAME_OF_BUILD_FILE-X.Y.constraints",
+    )
 
     feature_freeze_parser = argparse.ArgumentParser(add_help=False)
     feature_freeze_parser.add_argument(
@@ -357,6 +373,14 @@ def parse_args(program_name: str, args: list[str]) -> argparse.Namespace:
         default=False,
         help="Allow prereleases of collections to be included in the build" " file",
     )
+    new_parser.add_argument(
+        "--constraints-file",
+        default=None,
+        help="File containing a list of constraints for collections"
+        " included in Ansible. This is considered to be relative to"
+        " --build-data-dir. The default is"
+        " $BASENAME_OF_PIECES_FILE-X.Y.constraints",
+    )
 
     prepare_parser = subparsers.add_parser(
         "prepare",
diff --git a/src/antsibull/new_ansible.py b/src/antsibull/new_ansible.py
index fc8bc088..56c07eeb 100644
--- a/src/antsibull/new_ansible.py
+++ b/src/antsibull/new_ansible.py
@@ -20,6 +20,7 @@
 from packaging.version import Version as PypiVer
 
 from .changelog import ChangelogData
+from .versions import load_constraints_if_exists
 
 
 def display_exception(loop, context):  # pylint:disable=unused-argument
@@ -66,21 +67,30 @@ def version_is_compatible(
     collection: str,
     version: semver.Version,
     allow_prereleases: bool = False,
+    constraint: semver.SimpleSpec | None = None,
 ):
     # Metadata for this is not currently implemented. So everything is rated as compatible
     # as long as it is no prerelease
     if version.prerelease and not allow_prereleases:
         return False
+    if constraint is not None and version not in constraint:
+        return False
     return True
 
 
 def find_latest_compatible(
-    ansible_core_version, raw_dependency_versions, allow_prereleases: bool = False
+    ansible_core_version,
+    raw_dependency_versions,
+    allow_prereleases: bool = False,
+    constraints: dict[str, semver.SimpleSpec] | None = None,
 ):
     # Note: ansible-core compatibility is not currently implemented. It will be a piece of
     # collection metadata that is present in the collection but may not be present in galaxy.
     # We'll have to figure that out once the pieces are finalized
 
+    if constraints is None:
+        constraints = {}
+
     # Order versions
     reduced_versions = {}
     for dep, versions in raw_dependency_versions.items():
@@ -91,7 +101,11 @@
         # Step through the versions to select the latest one which is compatible
         for version in versions:
             if version_is_compatible(
-                ansible_core_version, dep, version, allow_prereleases=allow_prereleases
+                ansible_core_version,
+                dep,
+                version,
+                allow_prereleases=allow_prereleases,
+                constraint=constraints.get(dep),
             ):
                 reduced_versions[dep] = version
                 break
@@ -113,11 +127,17 @@ def new_ansible_command():
     ]
     ansible_core_versions.sort(reverse=True, key=lambda ver_req: ver_req[0])
 
+    constraints_filename = os.path.join(
+        app_ctx.extra["data_dir"], app_ctx.extra["constraints_file"]
+    )
+    constraints = load_constraints_if_exists(constraints_filename)
+
     ansible_core_version, python_requires = ansible_core_versions[0]
     dependencies = find_latest_compatible(
         ansible_core_version,
         dependencies,
         allow_prereleases=app_ctx.extra["allow_prereleases"],
+        constraints=constraints,
     )
 
     build_filename = os.path.join(
diff --git a/src/antsibull/versions.py b/src/antsibull/versions.py
new file mode 100644
index 00000000..77c795d1
--- /dev/null
+++ b/src/antsibull/versions.py
@@ -0,0 +1,144 @@
+# Author: Felix Fontein
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or
+# https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+# SPDX-FileCopyrightText: Ansible Project, 2023
+"""Handle version specific things."""
+
+from __future__ import annotations
+
+import os
+
+from antsibull_core.dependency_files import parse_pieces_file
+from semantic_version import SimpleSpec as SemVerSpec
+from semantic_version import Version as SemVer
+
+
+class _FeatureFreezeVersion:
+    """
+    Helper for making semantic version range specification valid for feature freeze.
+    """
+
+    def __init__(self, spec: str, collection_name: str):
+        self.potential_clauses: list = []
+        self.spec = spec
+        self.collection_name = collection_name
+        self.upper_operator: str | None = None
+        self.upper_version: SemVer | None = None
+        self.min_version: SemVer | None = None
+        self.pinned = False
+
+        spec_obj = SemVerSpec(spec)
+
+        # If there is a single clause, it's available as spec_obj.clause;
+        # multiple ones are available as spec_obj.clause.clauses.
+        try:
+            clauses = spec_obj.clause.clauses
+        except AttributeError:
+            clauses = [spec_obj.clause]
+
+        self.clauses = clauses
+        for clause in clauses:
+            self._process_clause(clause)
+
+    def _process_clause(self, clause) -> None:
+        """
+        Process one clause of the version range specification.
+ """ + if clause.operator in ("<", "<="): + if self.upper_operator is not None: + raise ValueError( + f"Multiple upper version limits specified for {self.collection_name}:" + f" {self.spec}" + ) + self.upper_operator = clause.operator + self.upper_version = clause.target + # Omit the upper bound as we're replacing it + return + + if clause.operator == ">=": + # Save the lower bound so we can write out a new compatible version + if self.min_version is not None: + raise ValueError( + f"Multiple minimum versions specified for {self.collection_name}: {self.spec}" + ) + self.min_version = clause.target + + if clause.operator == ">": + raise ValueError( + f"Strict lower bound specified for {self.collection_name}: {self.spec}" + ) + + if clause.operator == "==": + self.pinned = True + + self.potential_clauses.append(clause) + + def compute_new_spec(self) -> str: + """ + Compute a new version range specification that only allows newer patch releases that also + match the original range specification. + """ + if self.pinned: + if len(self.clauses) > 1: + raise ValueError( + f"Pin combined with other clauses for {self.collection_name}: {self.spec}" + ) + return self.spec + + upper_operator = self.upper_operator + upper_version = self.upper_version + if upper_operator is None or upper_version is None: + raise ValueError( + f"No upper version limit specified for {self.collection_name}: {self.spec}" + ) + + min_version = self.min_version + if min_version is None: + raise ValueError( + f"No minimum version specified for {self.collection_name}: {self.spec}" + ) + + if min_version.next_minor() <= upper_version: + upper_operator = "<" + upper_version = min_version.next_minor() + + new_clauses = sorted( + str(clause) + for clause in self.potential_clauses + if clause.target < upper_version + ) + new_clauses.append(f"{upper_operator}{upper_version}") + return ",".join(new_clauses) + + +def feature_freeze_version(spec: str, collection_name: str) -> str: + """ + Make semantic version range specification valid for feature freeze. + """ + return _FeatureFreezeVersion(spec, collection_name).compute_new_spec() + + +def load_constraints_if_exists(filename: str) -> dict[str, SemVerSpec]: + """ + Load a constraints file, if it exists. + """ + result: dict[str, SemVerSpec] = {} + if not os.path.exists(filename): + return result + for line in parse_pieces_file(filename): + record = [entry.strip() for entry in line.split(":", 1)] + if len(record) < 2: + raise ValueError( + f'While parsing {filename}: record "{line}" is not of the form "collection: spec"' + ) + collection = record[0] + try: + constraint = SemVerSpec(record[1]) + except ValueError as exc: + raise ValueError( + f"While parsing {filename}: cannot parse constraint" + f' "{record[1]}" for collection {collection}: {exc}' + ) from exc + result[collection] = constraint + return result