diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index d6bfb7e09..b53652e67 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2.22.0.dev
+current_version = 3.0.0.dev
 commit = False
 tag = False
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\.(?P<release>[a-z]+))?
diff --git a/.ci/scripts/changelog.py b/.ci/scripts/changelog.py
deleted file mode 100755
index 53cfe7644..000000000
--- a/.ci/scripts/changelog.py
+++ /dev/null
@@ -1,68 +0,0 @@
-import re
-import os
-import requests
-from packaging.version import Version
-from git import Repo
-
-repo = Repo(os.getcwd())
-heads = repo.git.ls_remote("--heads", "https://github.com/pulp/pulp_deb.git").split("\n")
-branches = [h.split("/")[-1] for h in heads if re.search(r"^([0-9]+)\.([0-9]+)$", h.split("/")[-1])]
-branches.sort(key=lambda ver: Version(ver), reverse=True)
-
-
-def get_changelog(branch):
-    """
-    Get changelog file for a given branch.
-
-    """
-    return requests.get(
-        f"https://raw.githubusercontent.com/pulp/pulp_deb/{branch}/CHANGES.rst"
-    ).text
-
-
-def get_changelog_releases(changelog):
-    """
-    Get all versions in changelog.
-
-    """
-    versions = re.findall(r"([0-9]+)\.([0-9]+)\.([0-9]+) \(", changelog)
-    return {".".join(v) for v in versions}
-
-
-def get_changelog_entry(changelog, version):
-    """
-    Get changelog entry for a given version.
-
-    """
-    entries = changelog.split(f"{version} (")[1].split("=====\n")
-    header = f"{version} ({entries[0]}=====\n"
-    text = "\n\n\n".join(entries[1].split("\n\n\n")[0:-1])
-    return header + text + "\n\n\n"
-
-
-main_changelog = get_changelog("main")
-main_entries = get_changelog_releases(main_changelog)
-entries_list = list(main_entries)
-to_add = {}
-for branch in branches:
-    changelog = get_changelog(branch)
-    entries = get_changelog_releases(changelog)
-    for entry in entries.difference(main_entries):
-        description = get_changelog_entry(changelog, entry)
-        entries_list.append(entry)
-        print(description)
-        to_add[entry] = description
-
-entries_list.sort(key=lambda ver: Version(ver), reverse=True)
-for version in sorted(to_add, key=lambda ver: Version(ver)):
-    next_version = entries_list[entries_list.index(version) + 1]
-    new_changelog = main_changelog.split(f"{next_version} (")[0] + to_add[version]
-    new_changelog = new_changelog + f"{next_version} ("
-    new_changelog = new_changelog + main_changelog.split(f"{next_version} (")[1]
-    main_changelog = new_changelog
-
-with open("CHANGES.rst", "w") as f:
-    f.write(main_changelog)
-
-if to_add:
-    repo.git.commit("-m", "Update Changelog\n\n[noissue]", "CHANGES.rst")
diff --git a/.ci/scripts/check_release.py b/.ci/scripts/check_release.py
new file mode 100755
index 000000000..4739a7e26
--- /dev/null
+++ b/.ci/scripts/check_release.py
@@ -0,0 +1,112 @@
+#!/usr/bin/env python
+
+# WARNING: DO NOT EDIT!
+#
+# This file was generated by plugin_template, and is managed by it. Please use
+# './plugin-template --github pulp_deb' to update this file.
+# +# For more info visit https://github.com/pulp/plugin_template + +import argparse +import re +import os +import yaml +from tempfile import TemporaryDirectory +from packaging.version import Version +from git import Repo + +UPSTREAM_REMOTE = "https://github.com/pulp/pulp_deb.git" +DEFAULT_BRANCH = "main" +RELEASE_BRANCH_REGEX = r"^([0-9]+)\.([0-9]+)$" +Y_CHANGELOG_EXTS = [".feature", ".removal", ".deprecation"] +Z_CHANGELOG_EXTS = [".bugfix", ".doc", ".misc"] + + +def main(): + """Check which branches need a release.""" + parser = argparse.ArgumentParser() + parser.add_argument( + "--branches", + default="supported", + help="A comma separated list of branches to check for releases. Can also use keyword: " + "'supported'. Defaults to 'supported', see `ci_update_branches` in " + "`plugin_template.yml`.", + ) + opts = parser.parse_args() + + with TemporaryDirectory() as d: + # Clone from upstream to ensure we have updated branches & main + repo = Repo.clone_from(UPSTREAM_REMOTE, d, filter="blob:none") + heads = [h.split("/")[-1] for h in repo.git.ls_remote("--heads").split("\n")] + available_branches = [h for h in heads if re.search(RELEASE_BRANCH_REGEX, h)] + available_branches.sort(key=lambda ver: Version(ver)) + available_branches.append(DEFAULT_BRANCH) + + branches = opts.branches + if branches == "supported": + with open(f"{d}/template_config.yml", mode="r") as f: + tc = yaml.safe_load(f) + branches = tc["ci_update_branches"] + branches.append(DEFAULT_BRANCH) + else: + branches = branches.split(",") + + if diff := set(branches) - set(available_branches): + print(f"Supplied branches contains non-existent branches! {diff}") + exit(1) + + print(f"Checking for releases on branches: {branches}") + + releases = [] + for branch in branches: + if branch != DEFAULT_BRANCH: + # Check if a Z release is needed + changes = repo.git.ls_tree("-r", "--name-only", f"origin/{branch}", "CHANGES/") + z_release = False + for change in changes.split("\n"): + # Check each changelog file to make sure everything checks out + _, ext = os.path.splitext(change) + if ext in Y_CHANGELOG_EXTS: + print( + f"Warning: A non-backported changelog ({change}) is present in the " + f"{branch} release branch!" + ) + elif ext in Z_CHANGELOG_EXTS: + z_release = True + if z_release: + # Blobless clone does not have file contents for Z branches, + # check commit message for last Z bump + git_branch = f"origin/{branch}" + next_version = repo.git.log( + "--oneline", "--grep=Bump to", "-n 1", git_branch, "--", ".bumpversion.cfg" + ).split("to")[-1] + next_version = Version(next_version) + print( + f"A Z-release is needed for {branch}, " + f"New Version: {next_version.base_version}" + ) + releases.append(next_version) + else: + # Check if a Y release is needed + changes = repo.git.ls_tree("-r", "--name-only", DEFAULT_BRANCH, "CHANGES/") + for change in changes.split("\n"): + _, ext = os.path.splitext(change) + if ext in Y_CHANGELOG_EXTS: + # We don't put Y release bumps in the commit message, check file instead + # The 'current_version' is always the next version to release + next_version = repo.git.grep( + "current_version", DEFAULT_BRANCH, "--", ".bumpversion.cfg" + ).split("=")[-1] + next_version = Version(next_version) + print( + f"A new Y-release is needed! 
New Version: {next_version.base_version}"
+                    )
+                    releases.append(next_version)
+                    break
+
+    if len(releases) == 0:
+        print("No new releases to perform.")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/.ci/scripts/check_requirements.py b/.ci/scripts/check_requirements.py
index 259bee3ed..bf03f82b1 100755
--- a/.ci/scripts/check_requirements.py
+++ b/.ci/scripts/check_requirements.py
@@ -42,12 +42,13 @@
             else:
                 errors.append(f"{filename}:{nr}: Unreadable requirement {line}")
         else:
-            if not line.startswith("opentelemetry"):
-                if check_prereleases and req.specifier.prereleases:
-                    if req.name != "pulp-deb-client":
-                        errors.append(
-                            f"{filename}:{nr}: Prerelease versions found in {line}."
-                        )
+            if check_prereleases and req.specifier.prereleases:
+                # Do not even think about begging for more exceptions!
+                if (
+                    not req.name.startswith("opentelemetry")
+                    and req.name != "pulp-deb-client"
+                ):
+                    errors.append(f"{filename}:{nr}: Prerelease versions found in {line}.")
             ops = [op for op, ver in req.specs]
             spec = str(req.specs)
             if "~=" in ops:
diff --git a/.ci/scripts/collect_changes.py b/.ci/scripts/collect_changes.py
new file mode 100755
index 000000000..8a79931a4
--- /dev/null
+++ b/.ci/scripts/collect_changes.py
@@ -0,0 +1,101 @@
+# WARNING: DO NOT EDIT!
+#
+# This file was generated by plugin_template, and is managed by it. Please use
+# './plugin-template --github pulp_deb' to update this file.
+#
+# For more info visit https://github.com/pulp/plugin_template
+
+import itertools
+import os
+import re
+
+import toml
+from git import GitCommandError, Repo
+from pkg_resources import parse_version
+
+# Read Towncrier settings
+tc_settings = toml.load("pyproject.toml")["tool"]["towncrier"]
+
+CHANGELOG_FILE = tc_settings.get("filename", "NEWS.rst")
+START_STRING = tc_settings.get(
+    "start_string",
+    "<!-- towncrier release notes start -->\n"
+    if CHANGELOG_FILE.endswith(".md")
+    else ".. towncrier release notes start\n",
+)
+TITLE_FORMAT = tc_settings.get("title_format", "{name} {version} ({project_date})")
+
+
+NAME_REGEX = r".*"
+VERSION_REGEX = r"([0-9]+\.[0-9]+\.[0-9][0-9ab]*)"
+DATE_REGEX = r"[0-9]{4}-[0-9]{2}-[0-9]{2}"
+TITLE_REGEX = (
+    "("
+    + re.escape(
+        TITLE_FORMAT.format(name="NAME_REGEX", version="VERSION_REGEX", project_date="DATE_REGEX")
+    )
+    .replace("NAME_REGEX", NAME_REGEX)
+    .replace("VERSION_REGEX", VERSION_REGEX)
+    .replace("DATE_REGEX", DATE_REGEX)
+    + ")"
+)
+
+
+def get_changelog(repo, branch):
+    return repo.git.show(f"{branch}:{CHANGELOG_FILE}") + "\n"
+
+
+def _tokenize_changes(splits):
+    assert len(splits) % 3 == 0
+    for i in range(len(splits) // 3):
+        title = splits[3 * i]
+        version = parse_version(splits[3 * i + 1])
+        yield [version, title + splits[3 * i + 2]]
+
+
+def split_changelog(changelog):
+    preamble, rest = changelog.split(START_STRING, maxsplit=1)
+    split_rest = re.split(TITLE_REGEX, rest)
+    return preamble + START_STRING + split_rest[0], list(_tokenize_changes(split_rest[1:]))
+
+
+def main():
+    repo = Repo(os.getcwd())
+    remote = repo.remotes[0]
+    branches = [ref for ref in remote.refs if re.match(r"^([0-9]+)\.([0-9]+)$", ref.remote_head)]
+    branches.sort(key=lambda ref: parse_version(ref.remote_head), reverse=True)
+    branches = [ref.name for ref in branches]
+
+    with open(CHANGELOG_FILE, "r") as f:
+        main_changelog = f.read()
+    preamble, main_changes = split_changelog(main_changelog)
+    old_length = len(main_changes)
+
+    for branch in branches:
+        print(f"Looking at branch {branch}")
+        try:
+            changelog = get_changelog(repo, branch)
+        except GitCommandError:
+            print("No changelog found on this branch.")
+            continue
+        dummy, changes = split_changelog(changelog)
+        new_changes = sorted(main_changes + changes, key=lambda x: x[0], reverse=True)
+        # Now remove duplicates (retain the first one)
+        main_changes = [new_changes[0]]
+        for left, right in itertools.pairwise(new_changes):
+            if left[0] != right[0]:
+                main_changes.append(right)
+
+    new_length = len(main_changes)
+    if old_length < new_length:
+        print(f"{new_length - old_length} new versions have been added.")
+        with open(CHANGELOG_FILE, "w") as fp:
+            fp.write(preamble)
+            for change in main_changes:
+                fp.write(change[1])
+
+        repo.git.commit("-m", "Update Changelog", "-m", "[noissue]", CHANGELOG_FILE)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/.ci/scripts/update_ci_branches.py b/.ci/scripts/update_ci_branches.py
deleted file mode 100755
index 617d8a4de..000000000
--- a/.ci/scripts/update_ci_branches.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# WARNING: DO NOT EDIT!
-#
-# This file was generated by plugin_template, and is managed by it. Please use
-# './plugin-template --github pulp_deb' to update this file.
-# -# For more info visit https://github.com/pulp/plugin_template - -import os -import sys -import requests - -branches = sys.argv[1:] - -headers = { - "Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}", - "Accept": "application/vnd.github.v3+json", -} - -github_api = "https://api.github.com" -workflow_path = "/actions/workflows/update_ci.yml/dispatches" -url = f"{github_api}/repos/pulp/pulp_deb{workflow_path}" - -for branch in branches: - print(f"Updating {branch}") - requests.post(url, headers=headers, json={"ref": branch}) diff --git a/.flake8 b/.flake8 index e84670485..df75c46e5 100644 --- a/.flake8 +++ b/.flake8 @@ -6,6 +6,8 @@ # For more info visit https://github.com/pulp/plugin_template [flake8] exclude = ./docs/*,*/migrations/* +per-file-ignores = */__init__.py: F401 + ignore = E203,W503,Q000,Q003,D100,D104,D106,D200,D205,D400,D401,D402 max-line-length = 100 diff --git a/.github/template_gitref b/.github/template_gitref index 046f010ac..c028a4451 100644 --- a/.github/template_gitref +++ b/.github/template_gitref @@ -1 +1 @@ -2021.08.26-214-gf2ffaa4 +2021.08.26-236-g91f4301 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7f08c937a..d3a634006 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -193,7 +193,7 @@ jobs: docker exec pulp bash -c "pip3 list && pip3 install pipdeptree && pipdeptree" deprecations: runs-on: ubuntu-latest - if: always() + if: github.base_ref == 'main' needs: test steps: - name: Fail on deprecations diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index fdf05657f..dbca55f51 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -4,7 +4,7 @@ # './plugin-template --github pulp_deb' to update this file. 
# # For more info visit https://github.com/pulp/plugin_template -name: "CodeQL" +name: "Deb CodeQL" on: workflow_dispatch: diff --git a/.github/workflows/create-branch.yml b/.github/workflows/create-branch.yml index 372ea4189..f7c877987 100644 --- a/.github/workflows/create-branch.yml +++ b/.github/workflows/create-branch.yml @@ -63,7 +63,11 @@ jobs: git checkout main bump2version --no-commit minor - - name: Make a PR with version bump + - name: Remove entries from CHANGES directory + run: | + find CHANGES -type f -regex ".*\.\(bugfix\|doc\|feature\|misc\|deprecation\|removal\)" -exec git rm {} + + + - name: Make a PR with version bump and without CHANGES/* uses: peter-evans/create-pull-request@v4 with: token: ${{ secrets.RELEASE_TOKEN }} diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index b89af597d..2cea0d538 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -6,7 +6,7 @@ # For more info visit https://github.com/pulp/plugin_template --- -name: Deb Nightly CI/CD +name: Deb Nightly CI on: schedule: # * is a special character in YAML so you have to quote this string @@ -170,7 +170,7 @@ jobs: steps: - uses: actions/checkout@v3 with: - fetch-depth: 1 + fetch-depth: 0 - uses: actions/setup-python@v3 with: @@ -179,7 +179,7 @@ jobs: - name: Install python dependencies run: | echo ::group::PYDEPS - pip install gitpython requests packaging + pip install gitpython toml echo ::endgroup:: - name: Configure Git with pulpbot name and email @@ -187,23 +187,16 @@ jobs: git config --global user.name 'pulpbot' git config --global user.email 'pulp-infra@redhat.com' - - name: Changelog history - run: python .ci/scripts/changelog.py + - name: Collect changes from all branches + run: python .ci/scripts/collect_changes.py - name: Create Pull Request uses: peter-evans/create-pull-request@v4 with: token: ${{ secrets.RELEASE_TOKEN }} - committer: pulpbot - author: pulpbot title: 'Update Changelog' - body: '[noissue]' + body: '' branch: 'changelog/update' - base: main - commit-message: | - Update Changelog - - [noissue] delete-branch: true publish: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e09847bcb..d8b38009f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -6,7 +6,7 @@ # For more info visit https://github.com/pulp/plugin_template --- -name: Release Pipeline +name: Deb Release Pipeline on: workflow_dispatch: inputs: @@ -15,7 +15,7 @@ on: required: true before_script: description: | - Bash code to run before script.sh is executed. This should only be used when re-running + Bash code to run before bindings and docs are built. This should only be used when re-running a workflow to correct some aspect of the docs. 
e.g.: git checkout origin/3.14 CHANGES.rst required: false @@ -41,7 +41,7 @@ jobs: - name: Install python dependencies run: | echo ::group::PYDEPS - pip install packaging~=21.3 bandersnatch bump2version gitpython towncrier==19.9.0 wheel + pip install packaging~=21.3 bump2version gitpython towncrier==19.9.0 wheel requests echo ::endgroup:: - name: Configure Git with pulpbot name and email @@ -65,21 +65,14 @@ jobs: with: name: pulp_deb.tar path: pulp_deb.tar - test: - needs: build-artifacts - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - env: - - TEST: pulp - - TEST: docs - - TEST: azure - - TEST: s3 - - TEST: generate-bindings - - TEST: lowerbounds + build-bindings-docs: + needs: build-artifacts + runs-on: ubuntu-latest + # Install scripts expect TEST to be set, 'docs' is most appropriate even though we don't run tests + env: + TEST: docs steps: - uses: actions/download-artifact@v3 @@ -112,10 +105,8 @@ jobs: echo ::endgroup:: echo "HTTPIE_CONFIG_DIR=$GITHUB_WORKSPACE/.ci/assets/httpie/" >> $GITHUB_ENV - - name: Set environment variables - run: | - echo "TEST=${{ matrix.env.TEST }}" >> $GITHUB_ENV - + # Building the bindings and docs requires accessing the OpenAPI specs endpoint, so we need to + # setup the Pulp instance. - name: Before Install run: .github/workflows/scripts/before_install.sh shell: bash @@ -144,74 +135,41 @@ jobs: GITHUB_CONTEXT: ${{ github.event.pull_request.commits_url }} shell: bash - - name: Before Script - run: .github/workflows/scripts/before_script.sh + - name: Additional before_script + run: ${{ github.event.inputs.before_script }} shell: bash - env: - PY_COLORS: '1' - ANSIBLE_FORCE_COLOR: '1' - GITHUB_PULL_REQUEST: ${{ github.event.number }} - GITHUB_PULL_REQUEST_BODY: ${{ github.event.pull_request.body }} - GITHUB_BRANCH: ${{ github.head_ref }} - GITHUB_REPO_SLUG: ${{ github.repository }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GITHUB_CONTEXT: ${{ github.event.pull_request.commits_url }} - REDIS_DISABLED: ${{ contains('', matrix.env.TEST) }} - - - name: Setting secrets - run: python3 .github/workflows/scripts/secrets.py "$SECRETS_CONTEXT" - env: - SECRETS_CONTEXT: ${{ toJson(secrets) }} - name: Install Python client run: .github/workflows/scripts/install_python_client.sh shell: bash - - name: Install Ruby client - if: ${{ env.TEST == 'bindings' || env.TEST == 'generate-bindings' }} run: .github/workflows/scripts/install_ruby_client.sh shell: bash - - name: Additional before_script - run: ${{ github.event.inputs.before_script }} - shell: bash - - - name: Script - if: ${{ env.TEST != 'generate-bindings' }} - run: .github/workflows/scripts/script.sh - shell: bash - env: - PY_COLORS: '1' - ANSIBLE_FORCE_COLOR: '1' - GITHUB_PULL_REQUEST: ${{ github.event.number }} - GITHUB_PULL_REQUEST_BODY: ${{ github.event.pull_request.body }} - GITHUB_BRANCH: ${{ github.head_ref }} - GITHUB_REPO_SLUG: ${{ github.repository }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GITHUB_CONTEXT: ${{ github.event.pull_request.commits_url }} - - name: Upload python client packages - if: ${{ env.TEST == 'bindings' || env.TEST == 'generate-bindings' }} uses: actions/upload-artifact@v3 with: name: python-client.tar path: python-client.tar - name: Upload python client docs - if: ${{ env.TEST == 'bindings' || env.TEST == 'generate-bindings' }} uses: actions/upload-artifact@v3 with: name: python-client-docs.tar path: python-client-docs.tar - - name: Upload ruby client packages - if: ${{ env.TEST == 'bindings' || env.TEST == 'generate-bindings' }} uses: 
actions/upload-artifact@v3 with: name: ruby-client.tar path: ruby-client.tar + - name: Build docs + run: | + export DJANGO_SETTINGS_MODULE=pulpcore.app.settings + export PULP_SETTINGS=$PWD/.ci/ansible/settings/settings.py + make -C docs/ PULP_URL="https://pulp" diagrams html + tar -cvf docs/docs.tar docs/_build + - name: Upload built docs - if: ${{ env.TEST == 'docs' }} uses: actions/upload-artifact@v3 with: name: docs.tar @@ -232,7 +190,7 @@ jobs: publish: runs-on: ubuntu-latest - needs: test + needs: build-bindings-docs env: TEST: publish @@ -293,7 +251,7 @@ jobs: - name: Publish docs to pulpproject.org run: | - tar -xvf docs.tar -C ./docs + tar -xvf docs.tar .github/workflows/scripts/publish_docs.sh tag ${{ github.event.inputs.release }} - name: Deploy plugin to pypi run: bash .github/workflows/scripts/publish_plugin_pypi.sh ${{ github.event.inputs.release }} @@ -335,25 +293,3 @@ jobs: - name: Create release on GitHub run: bash .github/workflows/scripts/create_release_from_tag.sh ${{ github.event.inputs.release }} - - - name: Cleanup repository before making changelog PR - run: rm -rf .lock generation pulp_deb_client* *-client.tar pulp_deb.tar todo web *docs.tar - - - name: Stage changelog for main branch - run: python .github/workflows/scripts/stage-changelog-for-default-branch.py ${{ github.event.inputs.release }} - - - name: Create Pull Request for Changelog - uses: peter-evans/create-pull-request@v4 - with: - token: ${{ secrets.RELEASE_TOKEN }} - committer: pulpbot - author: pulpbot - branch: changelog/${{ github.event.inputs.release }} - base: main - title: 'Cherry pick ${{ github.event.inputs.release }} changelog' - body: '[noissue]' - commit-message: | - ${{ github.event.inputs.release }} changelog - - [noissue] - delete-branch: true diff --git a/.github/workflows/scripts/release.py b/.github/workflows/scripts/release.py index a9f6474d5..a9b046df9 100755 --- a/.github/workflows/scripts/release.py +++ b/.github/workflows/scripts/release.py @@ -6,53 +6,35 @@ # For more info visit https://github.com/pulp/plugin_template import argparse -import asyncio import re import os -import shutil import textwrap - -from bandersnatch.mirror import BandersnatchMirror -from bandersnatch.master import Master -from bandersnatch.configuration import BandersnatchConfig +import requests from git import Repo - -from packaging.requirements import Requirement from pathlib import Path -async def get_package_from_pypi(package_name, plugin_path): +def get_package_from_pypi(version, plugin_path): """ Download a package from PyPI. 
- :param name: name of the package to download from PyPI - :return: String path to the package + :param version: version of the package to download from PyPI + :return: True/False if download was successful """ - config = BandersnatchConfig().config - config["mirror"]["master"] = "https://pypi.org" - config["mirror"]["workers"] = "1" - config["mirror"]["directory"] = plugin_path - if not config.has_section("plugins"): - config.add_section("plugins") - config["plugins"]["enabled"] = "blocklist_release\n" - if not config.has_section("allowlist"): - config.add_section("allowlist") - config["plugins"]["enabled"] += "allowlist_release\nallowlist_project\n" - config["allowlist"]["packages"] = "\n".join([package_name]) os.makedirs(os.path.join(plugin_path, "dist"), exist_ok=True) - async with Master("https://pypi.org/") as master: - mirror = BandersnatchMirror(homedir=plugin_path, master=master) - name = Requirement(package_name).name - result = await mirror.synchronize([name]) - package_found = False - - for package in result[name]: - current_path = os.path.join(plugin_path, package) - destination_path = os.path.join(plugin_path, "dist", os.path.basename(package)) - shutil.move(current_path, destination_path) - package_found = True - return package_found + r = requests.get(f"https://pypi.org/pypi/pulp-deb/{version}/json") + if r.status_code == 200: + metadata = r.json() + for url_data in metadata["urls"]: + filename = url_data["filename"] + r2 = requests.get(url_data["url"]) + if r2.status_code != 200: + raise RuntimeError(f"Failed to download released artifact {filename}") + with open(os.path.join(plugin_path, "dist", filename), "wb") as f: + f.write(r2.content) + return True + return False def create_release_commits(repo, release_version, plugin_path): @@ -74,7 +56,7 @@ def create_release_commits(repo, release_version, plugin_path): # Second commit: release version os.system("bump2version release --allow-dirty") - git.add(f"{plugin_path}/{plugin_name}/*") + git.add(f"{plugin_path}/pulp_deb/*") git.add(f"{plugin_path}/docs/conf.py") git.add(f"{plugin_path}/setup.py") git.add(f"{plugin_path}/requirements.txt") @@ -93,7 +75,7 @@ def create_release_commits(repo, release_version, plugin_path): if not new_dev_version: raise RuntimeError("Could not detect new dev version ... aborting.") - git.add(f"{plugin_path}/{plugin_name}/*") + git.add(f"{plugin_path}/pulp_deb/*") git.add(f"{plugin_path}/docs/conf.py") git.add(f"{plugin_path}/setup.py") git.add(f"{plugin_path}/requirements.txt") @@ -132,76 +114,76 @@ def create_tag_and_build_package(repo, desired_tag, commit_sha, plugin_path): repo.head.reset(index=True, working_tree=True) # Check if Package is available on PyPI - loop = asyncio.get_event_loop() # noqa - # fmt: off - package_found = asyncio.run( - get_package_from_pypi(f"pulp-deb=={tag.name}", plugin_path) - ) # noqa - # fmt: on - if not package_found: + if not get_package_from_pypi(tag.name, plugin_path): os.system("python3 setup.py sdist bdist_wheel --python-tag py3") -helper = textwrap.dedent( - """\ - Start the release process. +def main(): + helper = textwrap.dedent( + """\ + Start the release process. 
- Example: - setup.py on plugin before script: - version="2.0.0.dev" + Example: + setup.py on plugin before script: + version="2.0.0.dev" - $ python .ci/scripts/release.py + $ python .ci/scripts/release.py - setup.py on plugin after script: - version="2.0.1.dev" + setup.py on plugin after script: + version="2.0.1.dev" - """ -) -parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter, description=helper) - -parser.add_argument( - "release_version", - type=str, - help="The version string for the release.", -) - -args = parser.parse_args() - -release_version_arg = args.release_version - -release_path = os.path.dirname(os.path.abspath(__file__)) -plugin_path = release_path.split("/.github")[0] - -plugin_name = "pulp_deb" -version = None -with open(f"{plugin_path}/setup.py") as fp: - for line in fp.readlines(): - if "version=" in line: - version = re.split("\"|'", line)[1] - if not version: - raise RuntimeError("Could not detect existing version ... aborting.") -release_version = version.replace(".dev", "") - -print(f"\n\nRepo path: {plugin_path}") -repo = Repo(plugin_path) - -release_commit = None -if release_version != release_version_arg: - # Look for a commit with the requested release version - for commit in repo.iter_commits(): - if f"Release {release_version_arg}\n" in commit.message: - release_commit = commit - release_version = release_version_arg - break + """ + ) + parser = argparse.ArgumentParser( + formatter_class=argparse.RawTextHelpFormatter, description=helper + ) + + parser.add_argument( + "release_version", + type=str, + help="The version string for the release.", + ) + + args = parser.parse_args() + + release_version_arg = args.release_version + + release_path = os.path.dirname(os.path.abspath(__file__)) + plugin_path = release_path.split("/.github")[0] + + version = None + with open(f"{plugin_path}/setup.py") as fp: + for line in fp.readlines(): + if "version=" in line: + version = re.split("\"|'", line)[1] + if not version: + raise RuntimeError("Could not detect existing version ... aborting.") + release_version = version.replace(".dev", "") + + print(f"\n\nRepo path: {plugin_path}") + repo = Repo(plugin_path) + + release_commit = None + if release_version != release_version_arg: + # Look for a commit with the requested release version + for commit in repo.iter_commits(): + if f"Release {release_version_arg}\n" in commit.message: + release_commit = commit + release_version = release_version_arg + break + if not release_commit: + raise RuntimeError( + f"The release version {release_version_arg} does not match the .dev version at " + "HEAD. A release commit for such version does not exist." + ) + if not release_commit: - raise RuntimeError( - f"The release version {release_version_arg} does not match the .dev version at HEAD. " - "A release commit for such version does not exist." 
- ) - -if not release_commit: - release_commit_sha = create_release_commits(repo, release_version, plugin_path) -else: - release_commit_sha = release_commit.hexsha -create_tag_and_build_package(repo, release_version, release_commit_sha, plugin_path) + release_commit_sha = create_release_commits(repo, release_version, plugin_path) + else: + release_commit_sha = release_commit.hexsha + create_tag_and_build_package(repo, release_version, release_commit_sha, plugin_path) + + +if __name__ == "__main__": + main() diff --git a/.github/workflows/scripts/update_backport_labels.py b/.github/workflows/scripts/update_backport_labels.py new file mode 100755 index 000000000..26b223213 --- /dev/null +++ b/.github/workflows/scripts/update_backport_labels.py @@ -0,0 +1,55 @@ +# WARNING: DO NOT EDIT! +# +# This file was generated by plugin_template, and is managed by it. Please use +# './plugin-template --github pulp_deb' to update this file. +# +# For more info visit https://github.com/pulp/plugin_template + +import requests +import yaml +import random +import os + + +def random_color(): + """Generates a random 24-bit number in hex""" + color = random.randrange(0, 2**24) + return format(color, "06x") + + +session = requests.Session() +token = os.getenv("GITHUB_TOKEN") + +headers = { + "Authorization": f"token {token}", + "Accept": "application/vnd.github+json", + "X-GitHub-Api-Version": "2022-11-28", +} +session.headers.update(headers) + +# get all labels from the repository's current state +response = session.get("https://api.github.com/repos/pulp/pulp_deb/labels", headers=headers) +assert response.status_code == 200 +old_labels = set([x["name"] for x in response.json() if x["name"].startswith("backport-")]) + +# get ci_update_branches from template_config.yml +with open("./template_config.yml", "r") as f: + plugin_template = yaml.safe_load(f) +new_labels = set(["backport-" + x for x in plugin_template["ci_update_branches"]]) + +# delete old labels that are not in new labels +for label in old_labels.difference(new_labels): + response = session.delete( + f"https://api.github.com/repos/pulp/pulp_deb/labels/{label}", headers=headers + ) + assert response.status_code == 204 + +# create new labels that are not in old labels +for label in new_labels.difference(old_labels): + color = random_color() + response = session.post( + "https://api.github.com/repos/pulp/pulp_deb/labels", + headers=headers, + json={"name": label, "color": color}, + ) + assert response.status_code == 201 diff --git a/.github/workflows/scripts/update_ci.sh b/.github/workflows/scripts/update_ci.sh deleted file mode 100755 index 0f84808ce..000000000 --- a/.github/workflows/scripts/update_ci.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env bash - -set -eu - -if [ ! -d ../plugin_template ]; then - echo "Checking out plugin_template" - git clone https://github.com/pulp/plugin_template.git ../plugin_template -fi - - -if [ ! -f "template_config.yml" ]; then - echo "No template_config.yml detected." - exit 1 -fi - -pushd ../plugin_template -pip install -r test_requirements.txt -./plugin-template --github pulp_deb -popd - -# Check if only gitref file has changed, so no effect on CI workflows. -if [[ `git diff --name-only` == ".github/template_gitref" ]]; then - echo "CI update has no effect on workflows, skipping PR creation." 
- git stash - exit 0 -fi - -if [[ `git status --porcelain` ]]; then - git add -A - git commit -m "Update CI files" -m "[noissue]" -else - echo "No updates needed" -fi diff --git a/.github/workflows/update-labels.yml b/.github/workflows/update-labels.yml new file mode 100644 index 000000000..56e3257a6 --- /dev/null +++ b/.github/workflows/update-labels.yml @@ -0,0 +1,39 @@ +# WARNING: DO NOT EDIT! +# +# This file was generated by plugin_template, and is managed by it. Please use +# './plugin-template --github pulp_deb' to update this file. +# +# For more info visit https://github.com/pulp/plugin_template + + +--- +name: Deb Update Labels +on: + push: + branches: + - main + paths: + - 'template_config.yml' + +jobs: + update_backport_labels: + runs-on: ubuntu-latest + steps: + - uses: actions/setup-python@v3 + with: + python-version: "3.8" + - name: Configure Git with pulpbot name and email + run: | + git config --global user.name 'pulpbot' + git config --global user.email 'pulp-infra@redhat.com' + - name: Install python dependencies + run: | + echo ::group::PYDEPS + pip install requests pyyaml + echo ::endgroup:: + - uses: actions/checkout@v3 + - name: Update labels + run: | + python3 .github/workflows/scripts/update_backport_labels.py + env: + GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }} diff --git a/.github/workflows/update_ci.yml b/.github/workflows/update_ci.yml index 916a7451a..b06471284 100644 --- a/.github/workflows/update_ci.yml +++ b/.github/workflows/update_ci.yml @@ -7,7 +7,7 @@ --- -name: CI Update +name: Deb CI Update on: schedule: # * is a special character in YAML so you have to quote this string @@ -15,11 +15,6 @@ on: - cron: '30 2 * * 0' workflow_dispatch: - inputs: - all_branches: - description: "Run on all branches" - default: 'no' - required: false jobs: update: @@ -31,7 +26,9 @@ jobs: steps: - uses: actions/checkout@v3 with: - fetch-depth: 1 + repository: pulp/plugin_template + path: plugin_template + fetch-depth: 0 - uses: actions/setup-python@v3 with: @@ -47,29 +44,82 @@ jobs: run: | git config --global user.name 'pulpbot' git config --global user.email 'pulp-infra@redhat.com' + - uses: actions/checkout@v3 + with: + path: pulp_deb + ref: 'main' + fetch-depth: 0 + + - name: Run update + working-directory: pulp_deb + run: | + ../plugin_template/scripts/update_ci.sh - - name: Set short_ref - id: vars - run: echo short_ref=${GITHUB_REF#refs/*/} >> $GITHUB_OUTPUT - - name: Dispatching - if: github.event_name == 'schedule' || github.event.inputs.all_branches == 'yes' - run: python .ci/scripts/update_ci_branches.py 2.18 2.19 2.20 2.21 - env: - GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }} + - name: Create Pull Request for CI files + uses: peter-evans/create-pull-request@v4 + with: + token: ${{ secrets.RELEASE_TOKEN }} + path: pulp_deb + committer: pulpbot + author: pulpbot + title: 'Update CI files for branch main' + body: '[noissue]' + branch: 'update-ci/main' + base: 'main' + commit-message: | + Update CI files + + [noissue] + delete-branch: true + - uses: actions/checkout@v3 + with: + path: pulp_deb + ref: '2.20' + fetch-depth: 0 + + - name: Run update + working-directory: pulp_deb + run: | + ../plugin_template/scripts/update_ci.sh + + - name: Create Pull Request for CI files + uses: peter-evans/create-pull-request@v4 + with: + token: ${{ secrets.RELEASE_TOKEN }} + path: pulp_deb + committer: pulpbot + author: pulpbot + title: 'Update CI files for branch 2.20' + body: '[noissue]' + branch: 'update-ci/2.20' + base: '2.20' + commit-message: | + Update CI files + + [noissue] + 
delete-branch: true
+      - uses: actions/checkout@v3
+        with:
+          path: pulp_deb
+          ref: '2.21'
+          fetch-depth: 0

       - name: Run update
+        working-directory: pulp_deb
         run: |
-          .github/workflows/scripts/update_ci.sh
+          ../plugin_template/scripts/update_ci.sh

       - name: Create Pull Request for CI files
         uses: peter-evans/create-pull-request@v4
         with:
           token: ${{ secrets.RELEASE_TOKEN }}
+          path: pulp_deb
           committer: pulpbot
           author: pulpbot
-          title: 'Update CI files from ${{ steps.vars.outputs.short_ref }} branch'
+          title: 'Update CI files for branch 2.21'
           body: '[noissue]'
-          branch: 'create-pull-request/${{ steps.vars.outputs.short_ref }}/patch'
+          branch: 'update-ci/2.21'
+          base: '2.21'
           commit-message: |
             Update CI files
diff --git a/CHANGES.rst b/CHANGES.rst
index 8c3586d7e..f9c7f8bb2 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1033,6 +1033,15 @@ Features
+----
+
+
+2.0.0b4 (2020-01-14)
+====================
+
+No significant changes.
+
+
 ----
diff --git a/CHANGES/449.feature b/CHANGES/449.feature
new file mode 100644
index 000000000..bdff9ebd7
--- /dev/null
+++ b/CHANGES/449.feature
@@ -0,0 +1,3 @@
+Added ``version``, ``origin``, ``label``, and ``description`` fields to Releases.
+These fields can be set when creating new Releases via the API.
+Going forward, they will also be synced from upstream release files if present.
diff --git a/CHANGES/449.removal b/CHANGES/449.removal
new file mode 100644
index 000000000..c1219a11f
--- /dev/null
+++ b/CHANGES/449.removal
@@ -0,0 +1,2 @@
+Since release file fields, including "Label" and "Version", are now synced from upstream repositories, we have dropped the PUBLISH_RELEASE_FILE_LABEL and PUBLISH_RELEASE_FILE_VERSION settings.
+This removes the ability to publish Pulp-internal "Label" and "Version" values that never made much sense, and had been disabled by default since at least pulp_deb 2.18.0.
diff --git a/CHANGES/599.misc b/CHANGES/599.misc
new file mode 100644
index 000000000..973edc8eb
--- /dev/null
+++ b/CHANGES/599.misc
@@ -0,0 +1,2 @@
+This change includes a large DB migration to drop 'codename' and 'suite' from the uniqueness constraints of all structure content.
+The migration will merge any resulting collisions and alter all records with a foreign key relation to the eliminated content to point at the merge result instead.
diff --git a/CHANGES/599.removal b/CHANGES/599.removal
new file mode 100644
index 000000000..f24345f87
--- /dev/null
+++ b/CHANGES/599.removal
@@ -0,0 +1 @@
+The codename and suite fields are removed from the ReleaseComponent and ReleaseArchitecture models and all associated filters and viewsets.
diff --git a/CHANGES/777.bugfix b/CHANGES/777.bugfix
new file mode 100644
index 000000000..269578f5f
--- /dev/null
+++ b/CHANGES/777.bugfix
@@ -0,0 +1,2 @@
+Fixed a KeyError during publish if a package has an architecture that is not supported in the Packages file.
+Instead, a warning message will be logged.
diff --git a/CHANGES/780.misc b/CHANGES/780.misc
new file mode 100644
index 000000000..92bcbdb67
--- /dev/null
+++ b/CHANGES/780.misc
@@ -0,0 +1 @@
+Add tests for content filters, and make filters return an empty list if the Content is not in the RepoVersion, instead of raising a ValidationError.
\ No newline at end of file
diff --git a/CHANGES/790.misc b/CHANGES/790.misc
new file mode 100644
index 000000000..94b055021
--- /dev/null
+++ b/CHANGES/790.misc
@@ -0,0 +1 @@
+Added better scoping for the pytest fixtures.
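(Editor's note: the ``790.misc`` entry above is terse. As a rough illustration of what broader fixture scoping means in a pytest suite, here is a hedged sketch using session- and class-scoped fixtures; the fixture names are hypothetical and are not taken from the actual pulp_deb test suite.)

```python
# Illustrative only: hypothetical fixtures sketching the scoping idea from
# CHANGES/790.misc, not the plugin's real test code.
import uuid

import pytest


@pytest.fixture(scope="session")
def deb_repository_factory():
    """Expensive setup shared across the whole test session."""
    created = []

    def _factory(name=None):
        # Stand-in for an API call that would create a repository.
        repo = {"name": name or str(uuid.uuid4())}
        created.append(repo)
        return repo

    yield _factory
    # Teardown runs once per session instead of once per test.
    created.clear()


@pytest.fixture(scope="class")
def deb_fixture_repo(deb_repository_factory):
    """Reused by every test in a class rather than rebuilt per test."""
    return deb_repository_factory(name="shared-fixture-repo")
```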
diff --git a/CHANGES/793.feature b/CHANGES/793.feature
new file mode 100644
index 000000000..671be1fa8
--- /dev/null
+++ b/CHANGES/793.feature
@@ -0,0 +1,3 @@
+Added the ``publish_upstream_release_fields`` field to the repository model.
+To avoid a breaking change in publication behaviour, existing repositories are populated with the setting set to ``False``, while any newly created repositories will default to ``True``.
+Whatever the value on the repository, it can be overridden when creating a new publication.
diff --git a/CHANGES/797.bugfix b/CHANGES/797.bugfix
new file mode 100644
index 000000000..12627d740
--- /dev/null
+++ b/CHANGES/797.bugfix
@@ -0,0 +1 @@
+Fixed an async error preventing synchronization with ``sync_installer`` set to ``True``.
diff --git a/CHANGES/806.bugfix b/CHANGES/806.bugfix
new file mode 100644
index 000000000..e514ba22a
--- /dev/null
+++ b/CHANGES/806.bugfix
@@ -0,0 +1 @@
+Fixed content creation code triggered in rare edge cases when unapplying DB migration 0021.
diff --git a/CHANGES/807.bugfix b/CHANGES/807.bugfix
new file mode 100644
index 000000000..25ca89740
--- /dev/null
+++ b/CHANGES/807.bugfix
@@ -0,0 +1,2 @@
+Fixed a bug where structured package upload was only working as intended for the first package uploaded to each repository.
+Also added logging and ensured structure content is added to the creating task's ``created_resources`` list.
diff --git a/docs/_scripts/structured_repo.sh b/docs/_scripts/structured_repo.sh
index 1821a6b04..5d9799f59 100755
--- a/docs/_scripts/structured_repo.sh
+++ b/docs/_scripts/structured_repo.sh
@@ -18,10 +18,10 @@ TASK_HREF=$(http ${BASE_ADDR}/pulp/api/v3/distributions/deb/apt/ name=myrepo bas
 wait_until_task_finished $BASE_ADDR$TASK_HREF

 # create the necessary content (release, comp, architecture)
-RELEASE_HREF=$(http ${BASE_ADDR}/pulp/api/v3/content/deb/releases/ codename=mycodename suite=mysuite distribution=mydist | jq -r .pulp_href)
+RELEASE_HREF=$(http ${BASE_ADDR}/pulp/api/v3/content/deb/releases/ distribution=mydist codename=mycodename suite=mysuite | jq -r .pulp_href)
 # Note that creating the release is optional, but without it your published repo will use default values for the suite and the codename in the published Release file.
-ARCH_HREF=$(http ${BASE_ADDR}/pulp/api/v3/content/deb/release_architectures/ architecture=ppc64 codename=mycodename suite=mysuite distribution=mydist | jq -r .pulp_href)
-COMP_HREF=$(http ${BASE_ADDR}/pulp/api/v3/content/deb/release_components/ component=mycomp codename=mycodename suite=mysuite distribution=mydist | jq -r .pulp_href)
+ARCH_HREF=$(http ${BASE_ADDR}/pulp/api/v3/content/deb/release_architectures/ distribution=mydist architecture=ppc64 | jq -r .pulp_href)
+COMP_HREF=$(http ${BASE_ADDR}/pulp/api/v3/content/deb/release_components/ distribution=mydist component=mycomp | jq -r .pulp_href)
 PKG_COMP_HREF=$(http ${BASE_ADDR}/pulp/api/v3/content/deb/package_release_components/ package=$PACKAGE_HREF release_component=$COMP_HREF | jq -r .pulp_href)

 # add our content to the repository
diff --git a/docs/conf.py b/docs/conf.py
index ae010092f..d4af1bcc7 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -55,9 +55,9 @@
 # built documents.
 #
 # The short X.Y version.
-version = "2.22.0.dev"
+version = "3.0.0.dev"
 # The full version, including alpha/beta/rc tags.
-release = "2.22.0.dev"
+release = "3.0.0.dev"

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
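(Editor's note: the ``structured_repo.sh`` changes above reflect the reworked structure-content API: a Release may still carry ``codename`` and ``suite`` (plus the new ``version``, ``origin``, ``label``, and ``description`` fields), while ReleaseArchitecture and ReleaseComponent are now keyed by ``distribution`` alone. Below is a minimal Python sketch of the same calls using ``requests``; the base address and admin credentials are assumptions, as is the synchronous JSON response carrying a ``pulp_href`` field, which the httpie calls in the script rely on as well.)

```python
# A requests-based equivalent of the httpie calls in structured_repo.sh.
# BASE_ADDR and AUTH are assumed values for a local development instance.
import requests

BASE_ADDR = "http://localhost:24817"  # assumed Pulp API address
AUTH = ("admin", "password")  # assumed credentials


def create_content(endpoint, **data):
    """POST a structure-content unit and return the parsed JSON response."""
    response = requests.post(f"{BASE_ADDR}{endpoint}", json=data, auth=AUTH)
    response.raise_for_status()
    return response.json()


# Releases accept codename/suite alongside the distribution.
release = create_content(
    "/pulp/api/v3/content/deb/releases/",
    distribution="mydist",
    codename="mycodename",
    suite="mysuite",
)

# Architectures and components now only need the distribution as their key.
arch = create_content(
    "/pulp/api/v3/content/deb/release_architectures/",
    distribution="mydist",
    architecture="ppc64",
)
comp = create_content(
    "/pulp/api/v3/content/deb/release_components/",
    distribution="mydist",
    component="mycomp",
)

print(release["pulp_href"], arch["pulp_href"], comp["pulp_href"])
```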
diff --git a/pulp_deb/app/__init__.py b/pulp_deb/app/__init__.py index fb7f19bc5..ca076ab61 100644 --- a/pulp_deb/app/__init__.py +++ b/pulp_deb/app/__init__.py @@ -6,5 +6,5 @@ class PulpDebPluginAppConfig(PulpPluginAppConfig): name = "pulp_deb.app" label = "deb" - version = "2.22.0.dev" + version = "3.0.0.dev" python_package_name = "pulp_deb" diff --git a/pulp_deb/app/constants.py b/pulp_deb/app/constants.py index e458c754f..7d9608156 100644 --- a/pulp_deb/app/constants.py +++ b/pulp_deb/app/constants.py @@ -14,3 +14,6 @@ PACKAGE_UPLOAD_DEFAULT_DISTRIBUTION = "pulp" PACKAGE_UPLOAD_DEFAULT_COMPONENT = "upload" + +# Represents null values since nulls can't be used in unique indexes in postgres < 15 +NULL_VALUE = "__!!!NULL VALUE!!!__" diff --git a/pulp_deb/app/migrations/0021_remove_release_from_structure_types.py b/pulp_deb/app/migrations/0021_remove_release_from_structure_types.py index 145e04e8d..44166e8a2 100644 --- a/pulp_deb/app/migrations/0021_remove_release_from_structure_types.py +++ b/pulp_deb/app/migrations/0021_remove_release_from_structure_types.py @@ -53,12 +53,12 @@ def reassociate_with_release_for_model(release_model, model_to_associate): ) except release_model.DoesNotExist: # We do not batch for this rare edge case! - release = Release( + release = Release.objects.create( + pulp_type='deb.release', codename=content.codename, distribution=content.distribution, suite=content.suite, ) - release.save() content.release = release content_to_update.append(content) diff --git a/pulp_deb/app/migrations/0024_add_release_fields.py b/pulp_deb/app/migrations/0024_add_release_fields.py new file mode 100644 index 000000000..76aa4e8b2 --- /dev/null +++ b/pulp_deb/app/migrations/0024_add_release_fields.py @@ -0,0 +1,41 @@ +# Generated by Django 4.2.1 on 2023-06-01 07:19 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('deb', '0023_add_default_signing_services'), + ] + + operations = [ + migrations.AlterUniqueTogether( + name='release', + unique_together=set(), + ), + migrations.AddField( + model_name='release', + name='description', + field=models.TextField(default='__!!!NULL VALUE!!!__'), + ), + migrations.AddField( + model_name='release', + name='label', + field=models.TextField(default='__!!!NULL VALUE!!!__'), + ), + migrations.AddField( + model_name='release', + name='origin', + field=models.TextField(default='__!!!NULL VALUE!!!__'), + ), + migrations.AddField( + model_name='release', + name='version', + field=models.TextField(default='__!!!NULL VALUE!!!__'), + ), + migrations.AlterUniqueTogether( + name='release', + unique_together={('codename', 'suite', 'distribution', 'version', 'origin', 'label', 'description')}, + ), + ] diff --git a/pulp_deb/app/migrations/0025_merge_colliding_structure_content.py b/pulp_deb/app/migrations/0025_merge_colliding_structure_content.py new file mode 100644 index 000000000..951d1a275 --- /dev/null +++ b/pulp_deb/app/migrations/0025_merge_colliding_structure_content.py @@ -0,0 +1,315 @@ +# Generated by Django 3.2.19 on 2023-05-09 12:35, extended manually; + +import logging + +from datetime import datetime + +from django.db import migrations, models +from django.core.exceptions import ObjectDoesNotExist + +BATCH_SIZE = 1000 + +log = logging.getLogger(__name__) + + +def merge_colliding_structure_content(apps, schema_editor): + ReleaseArchitecture = apps.get_model('deb', 'ReleaseArchitecture') + ReleaseComponent = apps.get_model('deb', 'ReleaseComponent') + PackageReleaseComponent = 
apps.get_model('deb', 'PackageReleaseComponent')
+    RepositoryContent = apps.get_model('core', 'RepositoryContent')
+    RepositoryVersion = apps.get_model('core', 'RepositoryVersion')
+
+    print("\n")
+    log.info("{}: Starting data migration!".format(datetime.now()))
+
+    def _get_content_repo_version_set(repo_version_set, repo_content):
+        version_added = repo_content.version_added.number
+        if repo_content.version_removed:
+            version_removed = repo_content.version_removed.number
+        else:
+            version_removed = max(repo_version_set) + 1
+        return set([n for n in repo_version_set if version_added <= n < version_removed])
+
+    def _get_repo_content_to_update(duplicate_content_ids, content_to_keep):
+        # Note that len(duplicate_content_ids) is expected to be much smaller than BATCH_SIZE.
+        # We don't care if the batch is up to len(duplicate_content_ids) larger than BATCH_SIZE.
+        repo_content_to_update = []
+        for duplicate_content in RepositoryContent.objects.filter(
+            content_id__in=duplicate_content_ids
+        ):
+            repo_version_set = set(
+                RepositoryVersion.objects.filter(
+                    repository_id=duplicate_content.repository_id
+                ).values_list('number', flat=True)
+            )
+            for keep_content in RepositoryContent.objects.filter(
+                content_id=content_to_keep, repository_id=duplicate_content.repository_id
+            ):
+                if not keep_content.version_removed and not duplicate_content.version_removed:
+                    # Neither repo_content was ever removed.
+                    first_added = min(
+                        keep_content.version_added.number,
+                        duplicate_content.version_added.number,
+                    )
+                    if keep_content.version_added.number != first_added:
+                        keep_content.version_added = duplicate_content.version_added
+                        keep_content.save()
+                    message = '{}: Merging repo_content "{}" into "{}".'
+                    log.info(
+                        message.format(
+                            datetime.now(), duplicate_content.pulp_id, keep_content.pulp_id
+                        )
+                    )
+                    duplicate_content.delete()  # Does this work?
+                    duplicate_content = keep_content
+                elif keep_content.version_removed and duplicate_content.version_removed:
+                    # Both repo_contents were removed at some point.
+                    versions1 = _get_content_repo_version_set(repo_version_set, keep_content)
+                    versions2 = _get_content_repo_version_set(repo_version_set, duplicate_content)
+                    if versions1.intersection(versions2):
+                        # The two repo_content overlap.
+                        joint_version_range = versions1.union(versions2)
+                        first_added = min(joint_version_range)
+                        last_removed = max(joint_version_range)
+                        if keep_content.version_added.number != first_added:
+                            keep_content.version_added = duplicate_content.version_added
+                        if keep_content.version_removed.number != last_removed:
+                            keep_content.version_removed = duplicate_content.version_removed
+                        message = '{}: Merging repo_content "{}" into "{}".'
+                        log.info(
+                            message.format(
+                                datetime.now(), duplicate_content.pulp_id, keep_content.pulp_id
+                            )
+                        )
+                        keep_content.save()
+                        duplicate_content.delete()  # Does this work?
+                        duplicate_content = keep_content
+                else:
+                    # Exactly one repo_content has already been removed.
+                    versions1 = _get_content_repo_version_set(repo_version_set, keep_content)
+                    versions2 = _get_content_repo_version_set(repo_version_set, duplicate_content)
+                    if versions1.intersection(versions2):
+                        # The two repo_content overlap.
+                        first_added = min(versions1.union(versions2))
+                        if keep_content.version_added.number != first_added:
+                            keep_content.version_added = duplicate_content.version_added
+                        if keep_content.version_removed:
+                            keep_content.version_removed = None
+                        message = '{}: Merging repo_content "{}" into "{}".'
+ log.info( + message.format( + datetime.now(), duplicate_content.pulp_id, keep_content.pulp_id + ) + ) + keep_content.save() + duplicate_content.delete() # Does this work? + duplicate_content = keep_content + + duplicate_content.content_id = content_to_keep + repo_content_to_update.append(duplicate_content) + + return repo_content_to_update + + def _deduplicate_PRC(duplicate_component, component_to_keep): + duplicate_prcs = PackageReleaseComponent.objects.filter( + release_component=duplicate_component + ) + repo_content_to_update = [] + for duplicate_prc in duplicate_prcs.iterator(chunk_size=BATCH_SIZE): + try: + prc_to_keep = PackageReleaseComponent.objects.get( + release_component=component_to_keep, + package=duplicate_prc.package, + ) + except ObjectDoesNotExist: + component = ReleaseComponent.objects.get(pk=component_to_keep) + prc_to_keep = PackageReleaseComponent.objects.create( + pulp_type='deb.package_release_component', + release_component=component, + package=duplicate_prc.package, + ) + + repo_content_to_update += _get_repo_content_to_update( + [duplicate_prc.pk], prc_to_keep.pk + ) + + if len(repo_content_to_update) >= BATCH_SIZE: + RepositoryContent.objects.bulk_update(repo_content_to_update, ["content_id"]) + repo_content_to_update = [] + message = '{}: Merged PRC batch from duplicate component "{}" into component "{}"!' + log.info(message.format(datetime.now(), duplicate_component, component_to_keep)) + + # Handle remaining content <= BATCH_SIZE: + if len(repo_content_to_update) > 0: + RepositoryContent.objects.bulk_update(repo_content_to_update, ["content_id"]) + + PackageReleaseComponent.objects.filter(pk__in=duplicate_prcs).delete() + + # Deduplicate ReleaseArchitecture: + distributions = ( + ReleaseArchitecture.objects.all() + .distinct('distribution') + .values_list('distribution', flat=True) + ) + + for distribution in distributions: + architectures = ( + ReleaseArchitecture.objects.filter(distribution=distribution) + .distinct('architecture') + .values_list('architecture', flat=True) + ) + architecture_ids_to_delete = [] + repo_content_to_update = [] + for architecture in architectures: + duplicate_architecture_ids = list( + ReleaseArchitecture.objects.filter( + distribution=distribution, architecture=architecture + ).values_list('pk', flat=True) + ) + if len(duplicate_architecture_ids) > 1: + architecture_to_keep = duplicate_architecture_ids.pop() + message = ( + '{}: Merging duplicates for architecture "{}" in distribution "{}" into ' + 'ReleaseArchitecture "{}"!' + ) + log.info( + message.format(datetime.now(), architecture, distribution, architecture_to_keep) + ) + architecture_ids_to_delete += duplicate_architecture_ids + repo_content_to_update += _get_repo_content_to_update( + duplicate_architecture_ids, architecture_to_keep + ) + + if len(architecture_ids_to_delete) >= BATCH_SIZE: + # We assume len(repo_content_to_update)==len(architecture_ids_to_delete)! 
+ RepositoryContent.objects.bulk_update(repo_content_to_update, ["content_id"]) + repo_content_to_update = [] + + ReleaseArchitecture.objects.filter(pk__in=architecture_ids_to_delete).delete() + architecture_ids_to_delete = [] + + # Handle remaining content <= BATCH_SIZE: + if len(repo_content_to_update) > 0: + RepositoryContent.objects.bulk_update(repo_content_to_update, ["content_id"]) + + if len(architecture_ids_to_delete) > 0: + ReleaseArchitecture.objects.filter(pk__in=architecture_ids_to_delete).delete() + + # Deduplicate ReleaseComponent: + distributions = ( + ReleaseComponent.objects.all() + .distinct('distribution') + .values_list('distribution', flat=True) + ) + for distribution in distributions: + components = ( + ReleaseComponent.objects.filter(distribution=distribution) + .distinct('component') + .values_list('component', flat=True) + ) + component_ids_to_delete = [] + repo_content_to_update = [] + for component in components: + duplicate_component_ids = list( + ReleaseComponent.objects.filter( + distribution=distribution, component=component + ).order_by('-pulp_created').values_list('pk', flat=True) + ) + if len(duplicate_component_ids) > 1: + component_to_keep = duplicate_component_ids.pop() + message = ( + '{}: Merging duplicates for component "{}" in distribution "{}" into ' + 'ReleaseComponent "{}"!' + ) + log.info(message.format(datetime.now(), component, distribution, component_to_keep)) + component_ids_to_delete += duplicate_component_ids + repo_content_to_update += _get_repo_content_to_update( + duplicate_component_ids, component_to_keep + ) + + # Deduplicate PackageReleaseComponents + for duplicate_component in duplicate_component_ids: + message = ( + '{}: Handling PackageReleaseComponents for duplicate ReleaseComponent "{}"!' + ) + log.info(message.format(datetime.now(), duplicate_component)) + _deduplicate_PRC(duplicate_component, component_to_keep) + + if len(component_ids_to_delete) >= BATCH_SIZE: + # We assume len(repo_content_to_update)==len(component_ids_to_delete)! 
+ RepositoryContent.objects.bulk_update(repo_content_to_update, ["content_id"]) + repo_content_to_update = [] + + ReleaseComponent.objects.filter(pk__in=component_ids_to_delete).delete() + component_ids_to_delete = [] + + # Handle remaining content <= BATCH_SIZE: + if len(repo_content_to_update) > 0: + RepositoryContent.objects.bulk_update(repo_content_to_update, ["content_id"]) + + if len(component_ids_to_delete) > 0: + ReleaseComponent.objects.filter(pk__in=component_ids_to_delete).delete() + + log.info("{}: Data migration completed!\n".format(datetime.now())) + + +class Migration(migrations.Migration): + dependencies = [ + ('deb', '0024_add_release_fields'), + ] + + operations = [ + migrations.RunPython( + merge_colliding_structure_content, reverse_code=migrations.RunPython.noop, elidable=True + ), + migrations.RunSQL( + sql="SET CONSTRAINTS ALL IMMEDIATE;", + reverse_sql="", + ), + migrations.AlterUniqueTogether( + name='releasearchitecture', + unique_together={('distribution', 'architecture')}, + ), + migrations.AlterUniqueTogether( + name='releasecomponent', + unique_together={('distribution', 'component')}, + ), + # Give a default value to fields for the sake of back migrating + migrations.AlterField( + model_name='releasearchitecture', + name='codename', + field=models.TextField(default=''), + ), + migrations.AlterField( + model_name='releasearchitecture', + name='suite', + field=models.TextField(default=''), + ), + migrations.AlterField( + model_name='releasecomponent', + name='codename', + field=models.TextField(default=''), + ), + migrations.AlterField( + model_name='releasecomponent', + name='suite', + field=models.TextField(default=''), + ), + # Before dropping the fields for good! + migrations.RemoveField( + model_name='releasearchitecture', + name='codename', + ), + migrations.RemoveField( + model_name='releasearchitecture', + name='suite', + ), + migrations.RemoveField( + model_name='releasecomponent', + name='codename', + ), + migrations.RemoveField( + model_name='releasecomponent', + name='suite', + ), + ] diff --git a/pulp_deb/app/migrations/0026_aptrepository_publish_upstream_release_fields.py b/pulp_deb/app/migrations/0026_aptrepository_publish_upstream_release_fields.py new file mode 100644 index 000000000..8f2bba59d --- /dev/null +++ b/pulp_deb/app/migrations/0026_aptrepository_publish_upstream_release_fields.py @@ -0,0 +1,25 @@ +# Generated by Django 4.2.2 on 2023-06-20 07:58 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('deb', '0025_merge_colliding_structure_content'), + ] + + operations = [ + # create the field with a default of False so that the field is populated as False for + # existing models but then set the default to True for new repositories going forward + migrations.AddField( + model_name='aptrepository', + name='publish_upstream_release_fields', + field=models.BooleanField(default=False), + ), + migrations.AlterField( + model_name='aptrepository', + name='publish_upstream_release_fields', + field=models.BooleanField(default=True), + ), + ] diff --git a/pulp_deb/app/models/content/metadata.py b/pulp_deb/app/models/content/metadata.py index d5aa51ec9..4809a4b10 100644 --- a/pulp_deb/app/models/content/metadata.py +++ b/pulp_deb/app/models/content/metadata.py @@ -9,6 +9,8 @@ from pulpcore.plugin.models import Content +from pulp_deb.app.constants import NULL_VALUE + class Release(Content): """ @@ -22,9 +24,15 @@ class Release(Content): codename = models.TextField() suite = models.TextField() 
distribution = models.TextField() + version = models.TextField(default=NULL_VALUE) + origin = models.TextField(default=NULL_VALUE) + label = models.TextField(default=NULL_VALUE) + description = models.TextField(default=NULL_VALUE) repo_key_fields = ("distribution",) class Meta: default_related_name = "%(app_label)s_%(model_name)s" - unique_together = (("codename", "suite", "distribution"),) + unique_together = ( + ("codename", "suite", "distribution", "version", "origin", "label", "description"), + ) diff --git a/pulp_deb/app/models/content/structure_content.py b/pulp_deb/app/models/content/structure_content.py index 85d486a4e..61e5a9d66 100644 --- a/pulp_deb/app/models/content/structure_content.py +++ b/pulp_deb/app/models/content/structure_content.py @@ -36,18 +36,12 @@ class ReleaseArchitecture(Content): TYPE = "release_architecture" - architecture = models.TextField() distribution = models.TextField() - - # IMPORTANT: The following fields are only part of this model in order to avoid historical - # uniqueness constraint collisions. The plan is to drop these fields from this model ASAP! This - # will require a complex DB migration to sort out any collisions. - codename = models.TextField() - suite = models.TextField() + architecture = models.TextField() class Meta: default_related_name = "%(app_label)s_%(model_name)s" - unique_together = (("architecture", "distribution", "codename", "suite"),) + unique_together = (("distribution", "architecture"),) class ReleaseComponent(Content): @@ -63,12 +57,6 @@ class ReleaseComponent(Content): distribution = models.TextField() component = models.TextField() - # IMPORTANT: The following fields are only part of this model in order to avoid historical - # uniqueness constraint collisions. The plan is to drop these fields from this model ASAP! This - # will require a complex DB migration to sort out any collisions. 
-    codename = models.TextField()
-    suite = models.TextField()
-
     @property
     def plain_component(self):
         """
@@ -87,7 +75,7 @@ def plain_component(self):

     class Meta:
         default_related_name = "%(app_label)s_%(model_name)s"
-        unique_together = (("distribution", "component", "codename", "suite"),)
+        unique_together = (("distribution", "component"),)


 class PackageReleaseComponent(Content):
diff --git a/pulp_deb/app/models/repository.py b/pulp_deb/app/models/repository.py
index b214cde35..258f0286b 100644
--- a/pulp_deb/app/models/repository.py
+++ b/pulp_deb/app/models/repository.py
@@ -40,6 +40,8 @@ class AptRepository(Repository):
         AptRemote,
     ]

+    publish_upstream_release_fields = models.BooleanField(default=True)
+
     signing_service = models.ForeignKey(
         AptReleaseSigningService, on_delete=models.PROTECT, null=True
     )
diff --git a/pulp_deb/app/serializers/content_serializers.py b/pulp_deb/app/serializers/content_serializers.py
index cfc45b009..f3673116f 100644
--- a/pulp_deb/app/serializers/content_serializers.py
+++ b/pulp_deb/app/serializers/content_serializers.py
@@ -1,13 +1,11 @@
-from contextlib import suppress
 from gettext import gettext as _

 import os

 from debian import deb822, debfile
-from django.db import IntegrityError

 from rest_framework.serializers import CharField, DictField, Field, ValidationError, Serializer

-from pulpcore.plugin.models import Artifact, RemoteArtifact
+from pulpcore.plugin.models import Artifact, CreatedResource, RemoteArtifact
 from pulpcore.plugin.serializers import (
     ContentChecksumSerializer,
     MultipleArtifactContentSerializer,
@@ -21,6 +19,7 @@
     PACKAGE_UPLOAD_DEFAULT_DISTRIBUTION,
 )

+from pulp_deb.app.constants import NULL_VALUE
 from pulp_deb.app.models import (
     BasePackage,
     GenericContent,
@@ -69,6 +68,42 @@ def to_internal_value(self, data):
             raise ValidationError('Value must be "yes" or "no".')


+class NullableCharField(CharField):
+    """
+    A serializer field that accepts null values but saves them as the NULL_VALUE str.
+    """
+
+    def to_representation(self, value):
+        """
+        Translate str to str or None.
+        """
+        if value == NULL_VALUE:
+            return None
+        else:
+            return value
+
+    def to_internal_value(self, data):
+        """
+        Translate None to NULL_VALUE str.
+        """
+        if data is None:
+            return NULL_VALUE
+        else:
+            return data
+
+    def validate_empty_values(self, data):
+        """
+        Translate None to NULL_VALUE str.
+
+        This is needed because when user input is not set, it defaults to None and the
+        to_internal_value method never gets called.
+        """
+        (is_empty_value, data) = super().validate_empty_values(data)
+        if is_empty_value and data is None:
+            return is_empty_value, NULL_VALUE
+        return is_empty_value, data
+
+
 class GenericContentSerializer(SingleArtifactContentUploadSerializer, ContentChecksumSerializer):
     """
     A serializer for GenericContent.
@@ -102,12 +137,12 @@ class ReleaseFileSerializer(MultipleArtifactContentSerializer):
     A serializer for ReleaseFile.
     """

-    codename = CharField(help_text='Codename of the release, i.e. "buster".', required=False)
+    codename = CharField(help_text='Codename of the release, e.g. "buster".', required=False)

-    suite = CharField(help_text='Suite of the release, i.e. "stable".', required=False)
+    suite = CharField(help_text='Suite of the release, e.g. "stable".', required=False)

     distribution = CharField(
-        help_text='Distribution of the release, i.e. "stable/updates".', required=True
+        help_text='Distribution of the release, e.g. 
"stable/updates".', required=True ) relative_path = CharField(help_text="Path of file relative to url.", required=False) @@ -181,6 +216,13 @@ class SinglePackageUploadSerializer(SingleArtifactContentUploadSerializer): distribution = CharField(help_text="Name of the distribution.", required=False) component = CharField(help_text="Name of the component.", required=False) + @staticmethod + def _get_or_create_content_and_qs(model, **data): + content, created = model.objects.get_or_create(**data) + if created: + CreatedResource(content_object=content).save() + return content, model.objects.filter(pk=content.pk) + def create(self, validated_data): distribution = ( validated_data.pop("distribution", None) @@ -194,42 +236,34 @@ def create(self, validated_data): ) if validated_data.get("repository"): - repository = validated_data.pop("repository", None) + repository = validated_data.pop("repository") repository.cast() - result = super().create(validated_data) - content_to_add = self.Meta.model.objects.filter(pk=result.pk) - with suppress(IntegrityError): - release_component = ReleaseComponent(distribution=distribution, component=component) - release_component.save() - release_component_to_add = ReleaseComponent.objects.filter( - distribution=distribution, component=component, codename="", suite="" - ) - package = content_to_add[0] - release_arch = ReleaseArchitecture( - distribution=distribution, architecture=package.architecture - ) - release_arch.save() - release_arch_to_add = ReleaseArchitecture.objects.filter( - distribution=distribution, architecture=package.architecture - ) - package_release = PackageReleaseComponent( - release_component=release_component, package=package - ) - package_release.save() - package_release_to_add = PackageReleaseComponent.objects.filter( - release_component=release_component, package=package - ) - with repository.new_version() as new_version: - new_version.add_content(content_to_add) - new_version.add_content(release_component_to_add) - new_version.add_content(release_arch_to_add) - new_version.add_content(package_release_to_add) + package = super().create(validated_data) + package_qs = self.Meta.model.objects.filter(pk=package.pk) + + message = _('Adding uploaded package "{}" to component "{}" of distribution "{}".') + log.info(message.format(package.name, component, distribution)) - return result + component, component_qs = self._get_or_create_content_and_qs( + ReleaseComponent, distribution=distribution, component=component + ) + architecture_qs = self._get_or_create_content_and_qs( + ReleaseArchitecture, distribution=distribution, architecture=package.architecture + )[1] + prc_qs = self._get_or_create_content_and_qs( + PackageReleaseComponent, release_component=component, package=package + )[1] + + with repository.new_version() as new_version: + new_version.add_content(package_qs) + new_version.add_content(component_qs) + new_version.add_content(architecture_qs) + new_version.add_content(prc_qs) + else: + package = super().create(validated_data) - result = super().create(validated_data) - return result + return package class Meta(SingleArtifactContentUploadSerializer.Meta): fields = SingleArtifactContentUploadSerializer.Meta.fields + ("distribution", "component") @@ -660,10 +694,22 @@ class ReleaseSerializer(NoArtifactContentSerializer): codename = CharField() suite = CharField() distribution = CharField() + version = NullableCharField(required=False, allow_null=True, default=None) + origin = NullableCharField(required=False, allow_null=True, default=None) + 
label = NullableCharField(required=False, allow_null=True, default=None)
+    description = NullableCharField(required=False, allow_null=True, default=None)

     class Meta(NoArtifactContentSerializer.Meta):
         model = Release
-        fields = NoArtifactContentSerializer.Meta.fields + ("codename", "suite", "distribution")
+        fields = NoArtifactContentSerializer.Meta.fields + (
+            "codename",
+            "suite",
+            "distribution",
+            "version",
+            "origin",
+            "label",
+            "description",
+        )


 class ReleaseArchitectureSerializer(NoArtifactContentSerializer):
@@ -679,8 +725,6 @@ class Meta(NoArtifactContentSerializer.Meta):
         fields = NoArtifactContentSerializer.Meta.fields + (
             "architecture",
             "distribution",
-            "codename",
-            "suite",
         )


@@ -697,8 +741,6 @@ class Meta(NoArtifactContentSerializer.Meta):
         fields = NoArtifactContentSerializer.Meta.fields + (
             "component",
             "distribution",
-            "codename",
-            "suite",
         )

diff --git a/pulp_deb/app/serializers/publication_serializers.py b/pulp_deb/app/serializers/publication_serializers.py
index 7357c1d2a..638b953fa 100644
--- a/pulp_deb/app/serializers/publication_serializers.py
+++ b/pulp_deb/app/serializers/publication_serializers.py
@@ -35,6 +35,7 @@ class AptPublicationSerializer(PublicationSerializer):
         default=False,
     )
     structured = BooleanField(help_text="Activate structured publishing mode.", default=False)
+    publish_upstream_release_fields = BooleanField(help_text="", required=False)
     signing_service = RelatedField(
         help_text="Sign Release files with this signing key",
         many=False,
@@ -53,7 +54,12 @@ def validate(self, data):
         return data

     class Meta:
-        fields = PublicationSerializer.Meta.fields + ("simple", "structured", "signing_service")
+        fields = PublicationSerializer.Meta.fields + (
+            "simple",
+            "structured",
+            "signing_service",
+            "publish_upstream_release_fields",
+        )
         model = AptPublication
diff --git a/pulp_deb/app/serializers/repository_serializers.py b/pulp_deb/app/serializers/repository_serializers.py
index c6266ce03..4264d2837 100644
--- a/pulp_deb/app/serializers/repository_serializers.py
+++ b/pulp_deb/app/serializers/repository_serializers.py
@@ -43,6 +43,19 @@ class AptRepositorySerializer(RepositorySerializer):
     A Serializer for AptRepository.
     """

+    publish_upstream_release_fields = serializers.BooleanField(
+        help_text=_(
+            "Previously, pulp_deb only synced the Release file fields codename and suite, now "
+            "version, origin, label, and description are also synced. Setting this to False "
+            "will make Pulp revert to the old behaviour of using its own internal values "
+            "for the new fields during publish. This is primarily intended to avoid a sudden "
+            "change in behaviour for existing Pulp repositories, since many Release file field "
+            "changes need to be accepted by hosts consuming the published repository. The "
+            "default for new repositories is True."
+        ),
+        required=False,
+    )
+
     signing_service = RelatedField(
         help_text="A reference to an associated signing service. 
Used if " "AptPublication.signing_service is not set", @@ -64,6 +77,7 @@ class AptRepositorySerializer(RepositorySerializer): class Meta: fields = RepositorySerializer.Meta.fields + ( + "publish_upstream_release_fields", "signing_service", "signing_service_release_overrides", ) diff --git a/pulp_deb/app/tasks/publishing.py b/pulp_deb/app/tasks/publishing.py index e2fdc4360..cea4a6a4b 100644 --- a/pulp_deb/app/tasks/publishing.py +++ b/pulp_deb/app/tasks/publishing.py @@ -21,6 +21,7 @@ RepositoryVersion, ) +from pulp_deb.app.constants import NULL_VALUE from pulp_deb.app.models import ( AptPublication, AptRepository, @@ -67,7 +68,13 @@ def publish_verbatim(repository_version_pk): log.info(_("Publication (verbatim): {publication} created").format(publication=publication.pk)) -def publish(repository_version_pk, simple=False, structured=False, signing_service_pk=None): +def publish( + repository_version_pk, + simple=False, + structured=False, + signing_service_pk=None, + publish_upstream_release_fields=None, +): """ Use provided publisher to create a Publication based on a RepositoryVersion. @@ -105,8 +112,14 @@ def publish(repository_version_pk, simple=False, structured=False, signing_servi repository = AptRepository.objects.get(pk=repo_version.repository.pk) if simple: - codename = "default" - distribution = "default" + release = Release( + distribution="default", + codename="default", + origin="Pulp 3", + ) + if repository.description: + release.description = repository.description + component = "all" architectures = ( Package.objects.filter( @@ -118,15 +131,12 @@ def publish(repository_version_pk, simple=False, structured=False, signing_servi architectures = list(architectures) if "all" not in architectures: architectures.append("all") + release_helper = _ReleaseHelper( publication=publication, - codename=codename, - distribution=distribution, + release=release, components=[component], architectures=architectures, - description=repository.description, - label=repository.name, - version=str(repo_version.number), signing_service=repository.signing_service, ) @@ -170,11 +180,30 @@ def publish(repository_version_pk, simple=False, structured=False, signing_servi pk__in=repo_version.content.order_by("-pulp_created"), distribution=distribution, ).first() + publish_upstream = ( + publish_upstream_release_fields + if publish_upstream_release_fields is not None + else repository.publish_upstream_release_fields + ) if not release: codename = distribution.strip("/").split("/")[0] release = Release( - distribution=distribution, codename=codename, suite=codename + distribution=distribution, + codename=codename, + suite=codename, + origin="Pulp 3", + ) + if repository.description: + release.description = repository.description + elif not publish_upstream: + release = Release( + distribution=release.distribution, + codename=release.codename, + suite=release.suite, + origin="Pulp 3", ) + if repository.description: + release.description = repository.description release_components = ReleaseComponent.objects.filter( pk__in=repo_version.content.order_by("-pulp_created"), @@ -183,16 +212,12 @@ def publish(repository_version_pk, simple=False, structured=False, signing_servi components = list( release_components.distinct("component").values_list("component", flat=True) ) + release_helper = _ReleaseHelper( publication=publication, - codename=release.codename, - distribution=distribution, components=components, architectures=architectures, - description=repository.description, - label=repository.name, - 
version=str(repo_version.number),
-            suite=release.suite,
+            release=release,
             signing_service=repository.release_signing_service(release),
         )
@@ -249,10 +274,19 @@ def add_package(self, package):
             published_artifact.save()

         package_serializer = Package822Serializer(package, context={"request": None})
-        package_serializer.to822(self.component).dump(
-            self.package_index_files[package.architecture][0]
-        )
-        self.package_index_files[package.architecture][0].write(b"\n")
+
+        try:
+            package_serializer.to822(self.component).dump(
+                self.package_index_files[package.architecture][0]
+            )
+        except KeyError:
+            log.warning(
+                f"Published package '{package.relative_path}' with architecture "
+                f"'{package.architecture}' was not added to component '{self.component}' in "
+                f"distribution '{self.parent.distribution}', which lacks this architecture!"
+            )
+        else:
+            self.package_index_files[package.architecture][0].write(b"\n")

     def finish(self):
         # Publish Packages files
@@ -288,18 +322,13 @@ class _ReleaseHelper:
     def __init__(
         self,
         publication,
-        codename,
-        distribution,
         components,
         architectures,
-        label,
-        version,
-        description=None,
-        suite=None,
+        release,
         signing_service=None,
     ):
         self.publication = publication
-        self.distribution = distribution
+        self.distribution = distribution = release.distribution
         self.dists_subfolder = distribution.strip("/") if distribution != "/" else "flat-repo"
         if distribution[-1] == "/":
             message = "Using dists subfolder '{}' for structured publish of originally flat repo!"
@@ -308,21 +337,22 @@ def __init__(
         # published Release file. As a "nice to have" for human readers, we try to use
         # the same order of fields that official Debian repositories use.
         self.release = deb822.Release()
-        self.release["Origin"] = "Pulp 3"
-        if settings.PUBLISH_RELEASE_FILE_LABEL:
-            self.release["Label"] = label
-        if suite:
-            self.release["Suite"] = suite
-        if settings.PUBLISH_RELEASE_FILE_VERSION:
-            self.release["Version"] = version
-        if not codename:
-            codename = distribution.split("/")[0] if distribution != "/" else "flat-repo"
-        self.release["Codename"] = codename
+        if release.origin != NULL_VALUE:
+            self.release["Origin"] = release.origin
+        if release.label != NULL_VALUE:
+            self.release["Label"] = release.label
+        if release.suite:
+            self.release["Suite"] = release.suite
+        if release.version != NULL_VALUE:
+            self.release["Version"] = release.version
+        if not release.codename:
+            release.codename = distribution.split("/")[0] if distribution != "/" else "flat-repo"
+        self.release["Codename"] = release.codename
         self.release["Date"] = datetime.now(tz=timezone.utc).strftime("%a, %d %b %Y %H:%M:%S %z")
         self.release["Architectures"] = " ".join(architectures)
         self.release["Components"] = ""  # Will be set later
-        if description:
-            self.release["Description"] = description
+        if release.description != NULL_VALUE:
+            self.release["Description"] = release.description

         for checksum_type, deb_field in CHECKSUM_TYPE_MAP.items():
             if checksum_type in settings.ALLOWED_CONTENT_CHECKSUMS:
diff --git a/pulp_deb/app/tasks/synchronizing.py b/pulp_deb/app/tasks/synchronizing.py
index fbefbffbd..e5ccac9a9 100644
--- a/pulp_deb/app/tasks/synchronizing.py
+++ b/pulp_deb/app/tasks/synchronizing.py
@@ -635,15 +635,28 @@ async def _handle_distribution(self, distribution):
                 await pb.aincrement()
             return

-        # For historic reasons, we have tied up the distribution with the codename and suite.
-        # Untangling this will require careful planning due to changing uniqueness constraints.
- # See: https://github.com/pulp/pulp_deb/issues/599 - distribution_dict = { + # Parse release file + log.info(_('Parsing Release file at distribution="{}"').format(distribution)) + release_artifact = await _get_main_artifact_blocking(release_file) + release_file_dict = deb822.Release(release_artifact.file) + + release_fields = { "codename": release_file.codename, "suite": release_file.suite, "distribution": distribution, } - await self.put(DeclarativeContent(content=Release(**distribution_dict))) + + if "version" in release_file_dict: + release_fields["version"] = release_file_dict["Version"] + if "origin" in release_file_dict: + release_fields["origin"] = release_file_dict["Origin"] + if "label" in release_file_dict: + release_fields["label"] = release_file_dict["Label"] + if "description" in release_file_dict: + release_fields["description"] = release_file_dict["Description"] + + await self.put(DeclarativeContent(content=Release(**release_fields))) + # Create release architectures if release_file.architectures: architectures = _filter_split_architectures( @@ -659,13 +672,9 @@ async def _handle_distribution(self, distribution): for architecture in architectures: release_architecture_dc = DeclarativeContent( - content=ReleaseArchitecture(architecture=architecture, **distribution_dict) + content=ReleaseArchitecture(architecture=architecture, distribution=distribution) ) await self.put(release_architecture_dc) - # Parse release file - log.info(_('Parsing Release file at distribution="{}"').format(distribution)) - release_artifact = await _get_main_artifact_blocking(release_file) - release_file_dict = deb822.Release(release_artifact.file) # Retrieve and interpret any 'No-Support-for-Architecture-all' value: # We will refer to the presence of 'No-Support-for-Architecture-all: Packages' in a Release @@ -690,13 +699,13 @@ async def _handle_distribution(self, distribution): if distribution[-1] == "/": # Handle flat repo - sub_tasks = [self._handle_flat_repo(file_references, release_file, distribution_dict)] + sub_tasks = [self._handle_flat_repo(file_references, release_file, distribution)] else: # Handle components sub_tasks = [ self._handle_component( component, - distribution_dict, + distribution, release_file, file_references, architectures, @@ -711,7 +720,7 @@ async def _handle_distribution(self, distribution): async def _handle_component( self, component, - distribution_dict, + distribution, release_file, file_references, architectures, @@ -719,7 +728,7 @@ async def _handle_component( ): # Create release_component release_component_dc = DeclarativeContent( - content=ReleaseComponent(component=component, **distribution_dict) + content=ReleaseComponent(component=component, distribution=distribution) ) release_component = await self._create_unit(release_component_dc) @@ -799,10 +808,10 @@ async def _handle_component( raise NotImplementedError("Syncing source repositories is not yet implemented.") await asyncio.gather(*pending_tasks) - async def _handle_flat_repo(self, file_references, release_file, distribution_dict): + async def _handle_flat_repo(self, file_references, release_file, distribution): # We are creating a component so the flat repo can be published as a structured repo! 
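
For context on the `release_file_dict` lookups above: python-debian's `deb822.Release` behaves like a case-preserving but case-insensitive mapping, which is why the sync code can test `"version" in release_file_dict` and then read `release_file_dict["Version"]`. A standalone sketch, assuming a Release file downloaded to a local path:

```python
from debian import deb822

# Path is illustrative; in the sync code the file comes from the main artifact.
with open("/tmp/Release") as f:
    release = deb822.Release(f)

# Lookups are case-insensitive: "version" matches the "Version:" field.
if "version" in release:
    print(release["Version"])
if "origin" in release:
    print(release["Origin"])
```
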
release_component_dc = DeclarativeContent(
-            content=ReleaseComponent(component="flat-repo-component", **distribution_dict)
+            content=ReleaseComponent(component="flat-repo-component", distribution=distribution)
         )
         release_component = await self._create_unit(release_component_dc)
         pending_tasks = []
@@ -814,7 +823,7 @@
                 release_component=release_component,
                 architecture="",
                 file_references=file_references,
-                distribution_dict=distribution_dict,
+                distribution=distribution,
             )
         )

@@ -832,7 +841,7 @@ async def _handle_package_index(
         architecture,
         file_references,
         infix="",
-        distribution_dict=None,
+        distribution=None,
         hybrid_format=False,
     ):
         # Create package_index
@@ -1021,7 +1030,7 @@ async def _handle_package_index(
                     log.warning(_(message).format(architecture))
                     release_architecture_dc = DeclarativeContent(
                         content=ReleaseArchitecture(
-                            architecture=architecture, **distribution_dict
+                            architecture=architecture, distribution=distribution
                         )
                     )
                     await self.put(release_architecture_dc)
@@ -1039,7 +1048,9 @@
                 log.warning(_(message).format(package_architectures_string))
                 for architecture in package_architectures:
                     release_architecture_dc = DeclarativeContent(
-                        content=ReleaseArchitecture(architecture=architecture, **distribution_dict)
+                        content=ReleaseArchitecture(
+                            architecture=architecture, distribution=distribution
+                        )
                     )
                     await self.put(release_architecture_dc)

@@ -1075,13 +1086,13 @@ async def _handle_installer_file_index(
         deferred_download = self.remote.policy != Remote.IMMEDIATE
         # Parse installer file index
         file_list = defaultdict(dict)
-        for content_artifact in installer_file_index.contentartifact_set.all():
+        async for content_artifact in installer_file_index.contentartifact_set.all():
             algorithm = InstallerFileIndex.FILE_ALGORITHM.get(
                 os.path.basename(content_artifact.relative_path)
             )
             if not algorithm:
                 continue
-            for line in content_artifact.artifact.file:
+            for line in await _get_content_artifact_file(content_artifact):
                 digest, filename = line.decode().strip().split(maxsplit=1)
                 filename = os.path.normpath(filename)
                 if filename in InstallerFileIndex.FILE_ALGORITHM:  # strangely they may appear here
@@ -1123,6 +1134,11 @@ async def _handle_translation_files(self, release_file, release_component, file_
         )


+@sync_to_async
+def _get_content_artifact_file(content_artifact):
+    return content_artifact.artifact.file
+
+
 @sync_to_async
 def _readd_previous_package_indices(previous_version, new_version, distribution):
     new_version.add_content(
diff --git a/pulp_deb/app/viewsets/content.py b/pulp_deb/app/viewsets/content.py
index 3cea68471..73b870f57 100644
--- a/pulp_deb/app/viewsets/content.py
+++ b/pulp_deb/app/viewsets/content.py
@@ -130,9 +130,8 @@ def filter(self, qs, value):
         arg_instance = NamedModelViewSet.get_resource(arg_href, self.ARG_CLASS)

         if not repo_version.content.filter(pk=arg_instance.pk).exists():
-            raise ValidationError(
-                detail=_("Specified filter argument is not in specified RepositoryVersion")
-            )
+            # If the content is not in the repo version, return an empty queryset.
+ return qs.none() return self._filter(qs, arg_instance, repo_version.content) @@ -375,7 +374,7 @@ class ReleaseFilter(ContentFilter): class Meta: model = models.Release - fields = ["codename", "suite", "distribution"] + fields = ["codename", "suite", "distribution", "version", "label", "origin"] class ReleaseViewSet(ContentViewSet): @@ -407,7 +406,7 @@ class ReleaseArchitectureFilter(ContentFilter): class Meta: model = models.ReleaseArchitecture - fields = ["architecture", "distribution", "codename", "suite"] + fields = ["architecture", "distribution"] class ReleaseArchitectureViewSet(ContentViewSet): @@ -446,7 +445,7 @@ class ReleaseComponentFilter(ContentFilter): class Meta: model = models.ReleaseComponent - fields = ["component", "distribution", "codename", "suite"] + fields = ["component", "distribution"] class ReleaseComponentViewSet(ContentViewSet): diff --git a/pulp_deb/app/viewsets/publication.py b/pulp_deb/app/viewsets/publication.py index cade9858b..db099bd7d 100644 --- a/pulp_deb/app/viewsets/publication.py +++ b/pulp_deb/app/viewsets/publication.py @@ -84,6 +84,9 @@ def create(self, request): simple = serializer.validated_data.get("simple") structured = serializer.validated_data.get("structured") signing_service = serializer.validated_data.get("signing_service") + publish_upstream_release_fields = serializer.validated_data.get( + "publish_upstream_release_fields" + ) result = dispatch( func=tasks.publish, @@ -93,6 +96,7 @@ def create(self, request): "simple": simple, "structured": structured, "signing_service_pk": getattr(signing_service, "pk", None), + "publish_upstream_release_fields": publish_upstream_release_fields, }, ) return OperationPostponedResponse(result, request) diff --git a/pulp_deb/tests/functional/api/test_download_content.py b/pulp_deb/tests/functional/api/test_download_content.py index 4b4925a10..314158284 100644 --- a/pulp_deb/tests/functional/api/test_download_content.py +++ b/pulp_deb/tests/functional/api/test_download_content.py @@ -18,6 +18,7 @@ @pytest.mark.parametrize("is_verbatim", [False, True]) def test_download_content( deb_distribution_factory, + deb_get_fixture_server_url, deb_publication_factory, deb_remote_factory, deb_repository_factory, @@ -34,7 +35,8 @@ def test_download_content( """ # Create repository, remote and sync them repo = deb_repository_factory() - remote = deb_remote_factory(DEB_FIXTURE_STANDARD_REPOSITORY_NAME) + url = deb_get_fixture_server_url() + remote = deb_remote_factory(url=url) deb_sync_repository(remote, repo) repo = deb_get_repository_by_href(repo.pulp_href) diff --git a/pulp_deb/tests/functional/api/test_download_policies.py b/pulp_deb/tests/functional/api/test_download_policies.py index dc3ba3fd9..56e2e1ac3 100644 --- a/pulp_deb/tests/functional/api/test_download_policies.py +++ b/pulp_deb/tests/functional/api/test_download_policies.py @@ -12,6 +12,7 @@ @pytest.mark.parametrize("policy", ["on_demand", "streamed"]) def test_download_policy( apt_package_api, + deb_get_fixture_server_url, deb_get_repository_by_href, deb_publication_factory, deb_remote_factory, @@ -24,7 +25,8 @@ def test_download_policy( orphans_cleanup_api_client.cleanup({"orphan_protection_time": 0}) # Create repository and remote and verify latest `repository_version` is 0 repo = deb_repository_factory() - remote = deb_remote_factory(policy=policy) + url = deb_get_fixture_server_url() + remote = deb_remote_factory(url=url, policy=policy) assert repo.latest_version_href.endswith("/0/") # Sync and verify latest `repository_version` is 1 @@ -57,6 +59,7 @@ 
def test_download_policy( @pytest.mark.parametrize("policy", ["on_demand", "streamed"]) def test_lazy_sync_immediate_download_test( artifacts_api_client, + deb_get_fixture_server_url, deb_get_remote_by_href, deb_get_repository_by_href, deb_patch_remote, @@ -72,7 +75,8 @@ def test_lazy_sync_immediate_download_test( # Create repository and remote and sync them repo = deb_repository_factory() - remote = deb_remote_factory(policy=policy) + url = deb_get_fixture_server_url() + remote = deb_remote_factory(url=url, policy=policy) deb_sync_repository(remote, repo) repo = deb_get_repository_by_href(repo.pulp_href) diff --git a/pulp_deb/tests/functional/api/test_publish.py b/pulp_deb/tests/functional/api/test_publish.py index 3a06fef2d..1932602b9 100644 --- a/pulp_deb/tests/functional/api/test_publish.py +++ b/pulp_deb/tests/functional/api/test_publish.py @@ -37,6 +37,7 @@ ], ) def test_publish_any_repo_version( + deb_get_fixture_server_url, deb_remote_factory, deb_repository_factory, deb_sync_repository, @@ -61,7 +62,8 @@ def test_publish_any_repo_version( cfg = config.get_config() # Create a repository with at least two repository versions - remote = deb_remote_factory(distributions=DEB_FIXTURE_DISTRIBUTIONS) + url = deb_get_fixture_server_url() + remote = deb_remote_factory(url=url, distributions=DEB_FIXTURE_DISTRIBUTIONS) repo = deb_repository_factory() deb_sync_repository(remote, repo) for deb_generic_content in get_content(repo.to_dict())[DEB_GENERIC_CONTENT_NAME]: @@ -112,6 +114,7 @@ def test_publish_any_repo_version( ], ) def test_publish_signing_services( + deb_get_fixture_server_url, deb_remote_factory, deb_repository_factory, deb_sync_repository, @@ -135,7 +138,8 @@ def test_publish_signing_services( # Create a repository with at least two dists signing_service = deb_signing_service_factory - remote = deb_remote_factory(distributions=DEB_FIXTURE_DISTRIBUTIONS) + url = deb_get_fixture_server_url() + remote = deb_remote_factory(url=url, distributions=DEB_FIXTURE_DISTRIBUTIONS) distro = DEB_FIXTURE_DISTRIBUTIONS.split()[0] repo_options = {} publish_options = {"simple": True, "structured": True} diff --git a/pulp_deb/tests/functional/api/test_sync.py b/pulp_deb/tests/functional/api/test_sync.py index c8df1d38e..5b18324d2 100644 --- a/pulp_deb/tests/functional/api/test_sync.py +++ b/pulp_deb/tests/functional/api/test_sync.py @@ -30,6 +30,7 @@ ], ) def test_sync( + deb_get_fixture_server_url, deb_remote_factory, deb_repository_factory, deb_get_repository_by_href, @@ -40,7 +41,8 @@ def test_sync( """Test whether synchronizations with and without udebs works as expected.""" # Create a repository and a remote and verify latest `repository_version` is 0 repo = deb_repository_factory() - remote = deb_remote_factory(**remote_params) + url = deb_get_fixture_server_url() + remote = deb_remote_factory(url=url, **remote_params) assert repo.latest_version_href.endswith("/0/") # Sync the repository @@ -74,7 +76,6 @@ def test_sync( [ ( { - "repo_name": DEB_FIXTURE_INVALID_REPOSITORY_NAME, "architectures": "ppc64", "ignore_missing_package_indices": False, }, @@ -82,7 +83,6 @@ def test_sync( ), ( { - "repo_name": DEB_FIXTURE_INVALID_REPOSITORY_NAME, "architectures": "armeb", "ignore_missing_package_indices": False, }, @@ -92,6 +92,7 @@ def test_sync( ) def test_sync_missing_package_indices( expected, + deb_get_fixture_server_url, deb_remote_factory, deb_repository_factory, deb_sync_repository, @@ -106,7 +107,8 @@ def test_sync_missing_package_indices( """ # Create repository and remote repo = 
deb_repository_factory() - remote = deb_remote_factory(**remote_params) + url = deb_get_fixture_server_url(DEB_FIXTURE_INVALID_REPOSITORY_NAME) + remote = deb_remote_factory(url=url, **remote_params) # Verify a PulpTaskError is raised and the error message is as expected with pytest.raises(PulpTaskError) as exc: @@ -117,13 +119,17 @@ def test_sync_missing_package_indices( @pytest.mark.parallel @pytest.mark.parametrize( - "remote_params, expected", + "repo_name, remote_params, expected", [ - ({"url": "http://i-am-an-invalid-url.com/invalid/"}, ["Cannot connect"]), - ({"distributions": "no_dist"}, ["Could not find a Release file at"]), + ("http://i-am-an-invalid-url.com/invalid/", {}, ["Cannot connect"]), + ( + DEB_FIXTURE_STANDARD_REPOSITORY_NAME, + {"distributions": "no_dist"}, + ["Could not find a Release file at"], + ), ( + DEB_FIXTURE_INVALID_REPOSITORY_NAME, { - "repo_name": DEB_FIXTURE_INVALID_REPOSITORY_NAME, "distributions": "nosuite", "gpgkey": DEB_SIGNING_KEY, }, @@ -133,10 +139,12 @@ def test_sync_missing_package_indices( ) def test_sync_invalid_cases( expected, + deb_get_fixture_server_url, deb_remote_factory, deb_repository_factory, deb_sync_repository, remote_params, + repo_name, ): """Test whether various invalid sync cases fail as expected. @@ -148,7 +156,8 @@ def test_sync_invalid_cases( """ # Create repository and remote repo = deb_repository_factory() - remote = deb_remote_factory(**remote_params) + url = repo_name if repo_name.startswith("http://") else deb_get_fixture_server_url(repo_name) + remote = deb_remote_factory(url=url, **remote_params) # Verify a PulpTaskError is raised and the error message is as expected with pytest.raises(PulpTaskError) as exc: @@ -159,45 +168,45 @@ def test_sync_invalid_cases( @pytest.mark.parallel @pytest.mark.parametrize( - "remote_params, remote_diff_params", + "repo_name, remote_params, repo_diff_name, remote_diff_params", [ ( + DEB_FIXTURE_STANDARD_REPOSITORY_NAME, { - "repo_name": DEB_FIXTURE_STANDARD_REPOSITORY_NAME, "distributions": DEB_FIXTURE_SINGLE_DIST, "components": DEB_FIXTURE_COMPONENT, "architectures": None, }, + DEB_FIXTURE_STANDARD_REPOSITORY_NAME, { - "repo_name": DEB_FIXTURE_STANDARD_REPOSITORY_NAME, "distributions": DEB_FIXTURE_SINGLE_DIST, "components": DEB_FIXTURE_COMPONENT_UPDATE, "architectures": None, }, ), ( + DEB_FIXTURE_STANDARD_REPOSITORY_NAME, { - "repo_name": DEB_FIXTURE_STANDARD_REPOSITORY_NAME, "distributions": DEB_FIXTURE_SINGLE_DIST, "components": None, "architectures": DEB_FIXTURE_ARCH, }, + DEB_FIXTURE_STANDARD_REPOSITORY_NAME, { - "repo_name": DEB_FIXTURE_STANDARD_REPOSITORY_NAME, "distributions": DEB_FIXTURE_SINGLE_DIST, "components": None, "architectures": DEB_FIXTURE_ARCH_UPDATE, }, ), ( + DEB_FIXTURE_STANDARD_REPOSITORY_NAME, { - "repo_name": DEB_FIXTURE_STANDARD_REPOSITORY_NAME, "distributions": DEB_FIXTURE_SINGLE_DIST, "components": DEB_FIXTURE_COMPONENT, "architectures": None, }, + DEB_FIXTURE_UPDATE_REPOSITORY_NAME, { - "repo_name": DEB_FIXTURE_UPDATE_REPOSITORY_NAME, "distributions": DEB_FIXTURE_SINGLE_DIST, "components": DEB_FIXTURE_COMPONENT_UPDATE, "architectures": None, @@ -206,11 +215,14 @@ def test_sync_invalid_cases( ], ) def test_sync_optimize_no_skip_release_file( + deb_get_fixture_server_url, deb_remote_factory, deb_repository_factory, deb_get_repository_by_href, remote_params, remote_diff_params, + repo_name, + repo_diff_name, deb_sync_repository, ): """Test whether synchronizations have not been skipped for certain conditions. 
@@ -223,7 +235,8 @@ def test_sync_optimize_no_skip_release_file( """ # Create a repository and a remote and verify latest `repository_version` is 0 repo = deb_repository_factory() - remote = deb_remote_factory(**remote_params) + url = deb_get_fixture_server_url(repo_name) + remote = deb_remote_factory(url=url, **remote_params) assert repo.latest_version_href.endswith("/0/") # Sync the repository @@ -236,7 +249,8 @@ def test_sync_optimize_no_skip_release_file( assert not is_sync_skipped(task, DEB_REPORT_CODE_SKIP_PACKAGE) # Create a new remote with different parameters and sync with repository - remote_diff = deb_remote_factory(**remote_diff_params) + url = deb_get_fixture_server_url(repo_diff_name) + remote_diff = deb_remote_factory(url=url, **remote_diff_params) task_diff = deb_sync_repository(remote_diff, repo) repo = deb_get_repository_by_href(repo.pulp_href) @@ -248,15 +262,20 @@ def test_sync_optimize_no_skip_release_file( @pytest.mark.parallel def test_sync_optimize_skip_unchanged_package_index( + deb_get_fixture_server_url, deb_remote_factory, deb_repository_factory, deb_get_repository_by_href, deb_sync_repository, + apt_package_api, + apt_release_api, + apt_release_component_api, ): """Test whether package synchronization is skipped when a package has not been changed.""" # Create a repository and a remote and verify latest `repository_version` is 0 repo = deb_repository_factory() - remote = deb_remote_factory(distributions=DEB_FIXTURE_SINGLE_DIST) + url = deb_get_fixture_server_url() + remote = deb_remote_factory(url=url, distributions=DEB_FIXTURE_SINGLE_DIST) assert repo.latest_version_href.endswith("/0/") # Sync the repository @@ -264,30 +283,67 @@ def test_sync_optimize_skip_unchanged_package_index( repo = deb_get_repository_by_href(repo.pulp_href) # Verify latest `repository_version` is 1 and sync was not skipped - assert repo.latest_version_href.endswith("/1/") + repo_v1_href = repo.latest_version_href + assert repo_v1_href.endswith("/1/") assert not is_sync_skipped(task, DEB_REPORT_CODE_SKIP_RELEASE) assert not is_sync_skipped(task, DEB_REPORT_CODE_SKIP_PACKAGE) # Create new remote with both updated and unchanged packages and sync with repository - remote_diff = deb_remote_factory( - DEB_FIXTURE_UPDATE_REPOSITORY_NAME, distributions=DEB_FIXTURE_SINGLE_DIST - ) + url = deb_get_fixture_server_url(DEB_FIXTURE_UPDATE_REPOSITORY_NAME) + remote_diff = deb_remote_factory(url=url, distributions=DEB_FIXTURE_SINGLE_DIST) task_diff = deb_sync_repository(remote_diff, repo) repo = deb_get_repository_by_href(repo.pulp_href) # Verify latest `repository_version` is 2, release was not skipped and package was skipped - assert repo.latest_version_href.endswith("/2/") + repo_v2_href = repo.latest_version_href + assert repo_v2_href.endswith("/2/") assert not is_sync_skipped(task_diff, DEB_REPORT_CODE_SKIP_RELEASE) assert is_sync_skipped(task_diff, DEB_REPORT_CODE_SKIP_PACKAGE) + # === Test whether the content filters are working. === + # This doesn't _technically_ have anything to do with testing syncing, but it's a + # convenient place to do it since we've already created a repo with content and + # multiple versions. Repo version 1 synced from debian/ragnarok and version 2 synced + # from debian-update/ragnarok. 
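
Before the assertions: these content filters take a value of the form `<content_href>,<repository_version_href>`, and the viewset change earlier in this diff means a filter whose content is absent from the given repository version now returns an empty result set instead of a 400 error. That is what makes the `count == 0` checks against the version 1 href meaningful. Schematically, reusing the names defined in this test:

```python
# Packages that belong to the given release, scoped to one repository version:
apt_package_api.list(release=f"{release_href},{repo_v1_href}")

# Content absent from the version yields an empty page rather than an error:
assert apt_release_api.list(package=f"{package_href},{repo_v1_href}").count == 0
```
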
+ releases = apt_release_api.list(repository_version=repo_v2_href) + assert releases.count == 1 + release_href = releases.results[0].pulp_href + release_components = apt_release_component_api.list(repository_version=repo_v2_href) + assert release_components.count == 2 + rc = [x for x in release_components.results if x.component == "asgard"] + rc_href = rc[0].pulp_href + + # some simple "happy path" tests to ensure the filters are working properly + assert apt_package_api.list(release=f"{release_href},{repo_v1_href}").count == 4 + assert apt_package_api.list(release=f"{release_href},{repo_v2_href}").count == 6 + assert apt_package_api.list(release=f"{release_href},{repo.pulp_href}").count == 6 + + assert apt_package_api.list(release_component=f"{rc_href},{repo_v1_href}").count == 3 + assert apt_package_api.list(release_component=f"{rc_href},{repo_v2_href}").count == 5 + assert apt_package_api.list(release_component=f"{rc_href},{repo.pulp_href}").count == 5 + + packages = apt_package_api.list(release_component=f"{rc_href},{repo.pulp_href}") + # The package that was added to asgard in debian-update. + package_href = [x for x in packages.results if x.package == "heimdallr"][0].pulp_href + + assert apt_release_api.list(package=f"{package_href},{repo_v1_href}").count == 0 + assert apt_release_api.list(package=f"{package_href},{repo_v2_href}").count == 1 + assert apt_release_api.list(package=f"{package_href},{repo.pulp_href}").count == 1 + + assert apt_release_component_api.list(package=f"{package_href},{repo_v1_href}").count == 0 + assert apt_release_component_api.list(package=f"{package_href},{repo_v2_href}").count == 1 + assert apt_release_component_api.list(package=f"{package_href},{repo.pulp_href}").count == 1 + def test_sync_orphan_cleanup_fail( + deb_get_fixture_server_url, deb_remote_factory, deb_repository_factory, deb_get_repository_by_href, deb_sync_repository, orphans_cleanup_api_client, monitor_task, + delete_orphans_pre, ): """Test whether an orphan cleanup is possible after syncing where only some PackageIndices got changed and older repository versions are not kept. @@ -298,15 +354,15 @@ def test_sync_orphan_cleanup_fail( repo = deb_repository_factory(retain_repo_versions=1) # Create a remote and sync with repo. Verify the latest `repository_version` is 1. - remote = deb_remote_factory(distributions=DEB_FIXTURE_SINGLE_DIST) + url = deb_get_fixture_server_url() + remote = deb_remote_factory(url=url, distributions=DEB_FIXTURE_SINGLE_DIST) deb_sync_repository(remote, repo) repo = deb_get_repository_by_href(repo.pulp_href) assert repo.latest_version_href.endswith("/1/") # Create a new remote with updated packages and sync again. Verify `repository_version` is 2. 
-    remote_diff = deb_remote_factory(
-        DEB_FIXTURE_UPDATE_REPOSITORY_NAME, distributions=DEB_FIXTURE_SINGLE_DIST
-    )
+    url = deb_get_fixture_server_url(DEB_FIXTURE_UPDATE_REPOSITORY_NAME)
+    remote_diff = deb_remote_factory(url=url, distributions=DEB_FIXTURE_SINGLE_DIST)
     deb_sync_repository(remote_diff, repo)
     repo = deb_get_repository_by_href(repo.pulp_href)
     assert repo.latest_version_href.endswith("/2/")
@@ -316,7 +372,8 @@
     task = monitor_task(orphans_cleanup_api_client.cleanup({"orphan_protection_time": 0}).task)
     assert task.state == "completed"
     for report in task.progress_reports:
-        assert report.total == 2 if "Content" in report.message else 5
+        if "Content" in report.message:
+            assert report.done == 2


 def is_sync_skipped(task, code):
diff --git a/pulp_deb/tests/functional/conftest.py b/pulp_deb/tests/functional/conftest.py
index 2eec30d29..fa3146c11 100644
--- a/pulp_deb/tests/functional/conftest.py
+++ b/pulp_deb/tests/functional/conftest.py
@@ -1,11 +1,10 @@
-from pulp_smash.pulp3.bindings import monitor_task
 from pulp_smash.pulp3.utils import gen_distribution, gen_repo
 from pathlib import Path
 import pytest
 import os
 import stat

-from pulp_deb.tests.functional.utils import gen_deb_remote, gen_local_deb_remote
+from pulp_deb.tests.functional.utils import gen_local_deb_remote
 from pulp_smash.utils import execute_pulpcore_python, uuid4
 from pulp_deb.tests.functional.constants import DEB_FIXTURE_STANDARD_REPOSITORY_NAME
@@ -14,6 +13,8 @@
     AptRepositorySyncURL,
     ContentGenericContentsApi,
     ContentPackagesApi,
+    ContentReleasesApi,
+    ContentReleaseComponentsApi,
     DebAptPublication,
     DebVerbatimPublication,
     DistributionsAptApi,
@@ -24,57 +25,70 @@
 )


-@pytest.fixture
-def apt_client(cid, bindings_cfg):
+@pytest.fixture(scope="session")
+def apt_client(_api_client_set, bindings_cfg):
     """Fixture for APT client."""
     api_client = ApiClient(bindings_cfg)
-    api_client.default_headers["Correlation-ID"] = cid
-    return api_client
+    _api_client_set.add(api_client)
+    yield api_client
+    _api_client_set.remove(api_client)


-@pytest.fixture
+@pytest.fixture(scope="session")
 def apt_repository_api(apt_client):
     """Fixture for APT repositories API."""
     return RepositoriesAptApi(apt_client)


-@pytest.fixture
+@pytest.fixture(scope="session")
 def apt_remote_api(apt_client):
     """Fixture for APT remote API."""
     return RemotesAptApi(apt_client)


-@pytest.fixture
+@pytest.fixture(scope="session")
 def apt_publication_api(apt_client):
     """Fixture for APT publication API."""
     return PublicationsAptApi(apt_client)


-@pytest.fixture
+@pytest.fixture(scope="session")
 def apt_verbatim_publication_api(apt_client):
     """Fixture for Verbatim publication API."""
     return PublicationsVerbatimApi(apt_client)


-@pytest.fixture
+@pytest.fixture(scope="session")
 def apt_distribution_api(apt_client):
     """Fixture for APT distribution API."""
     return DistributionsAptApi(apt_client)


-@pytest.fixture
+@pytest.fixture(scope="session")
 def apt_package_api(apt_client):
     """Fixture for APT package API."""
     return ContentPackagesApi(apt_client)


-@pytest.fixture
+@pytest.fixture(scope="session")
+def apt_release_api(apt_client):
+    """Fixture for APT release API."""
+    return ContentReleasesApi(apt_client)
+
+
+@pytest.fixture(scope="session")
+def apt_release_component_api(apt_client):
+    """Fixture for APT release component API."""
+    return ContentReleaseComponentsApi(apt_client)
+
+
+@pytest.fixture(scope="session")
 def apt_generic_content_api(apt_client):
     """Fixture for APT generic content API."""
     return 
ContentGenericContentsApi(apt_client) -@pytest.fixture +@pytest.fixture(scope="class") def deb_distribution_factory(apt_distribution_api, gen_object_with_cleanup): """Fixture that generates a deb distribution with cleanup from a given publication.""" @@ -86,7 +100,7 @@ def _deb_distribution_factory(publication): return _deb_distribution_factory -@pytest.fixture +@pytest.fixture(scope="class") def deb_generic_content_factory(apt_generic_content_api, gen_object_with_cleanup): """Fixture that generates deb generic content with cleanup.""" @@ -96,7 +110,7 @@ def _deb_generic_content_factory(**kwargs): return _deb_generic_content_factory -@pytest.fixture +@pytest.fixture(scope="class") def deb_package_factory(apt_package_api, gen_object_with_cleanup): """Fixture that generates deb package with cleanup.""" @@ -106,7 +120,7 @@ def _deb_package_factory(**kwargs): return _deb_package_factory -@pytest.fixture +@pytest.fixture(scope="class") def deb_publication_factory(apt_publication_api, gen_object_with_cleanup): """Fixture that generates a deb publication with cleanup from a given repository.""" @@ -117,7 +131,7 @@ def _deb_publication_factory(repo, **kwargs): return _deb_publication_factory -@pytest.fixture +@pytest.fixture(scope="class") def deb_repository_factory(apt_repository_api, gen_object_with_cleanup): """Fixture that generates a deb repository with cleanup.""" @@ -127,24 +141,17 @@ def _deb_repository_factory(**kwargs): return _deb_repository_factory -@pytest.fixture -def deb_remote_factory( - apt_remote_api, - deb_fixture_server, - gen_object_with_cleanup, -): +@pytest.fixture(scope="class") +def deb_remote_factory(apt_remote_api, gen_object_with_cleanup): """Fixture that generates a deb remote with cleanup.""" - def _deb_remote_factory(repo_name=DEB_FIXTURE_STANDARD_REPOSITORY_NAME, **kwargs): - if "url" in kwargs: - return gen_object_with_cleanup(apt_remote_api, gen_deb_remote(**kwargs)) - url = deb_fixture_server.make_url(repo_name) + def _deb_remote_factory(url, **kwargs): return gen_object_with_cleanup(apt_remote_api, gen_local_deb_remote(url=str(url), **kwargs)) return _deb_remote_factory -@pytest.fixture +@pytest.fixture(scope="class") def deb_remote_custom_data_factory(apt_remote_api, gen_object_with_cleanup): """Fixture that generates a deb remote with cleanup using custom data.""" @@ -154,7 +161,7 @@ def _deb_remote_custom_data_factory(data): return _deb_remote_custom_data_factory -@pytest.fixture +@pytest.fixture(scope="class") def deb_verbatim_publication_factory(apt_verbatim_publication_api, gen_object_with_cleanup): """Fixture that generates a deb verbatim publication with cleanup from a given repository.""" @@ -196,7 +203,7 @@ def _deb_get_remotes_by_name(name): @pytest.fixture -def deb_delete_remote(apt_remote_api): +def deb_delete_remote(apt_remote_api, monitor_task): """Fixture that will delete a deb remote.""" def _deb_delete_remote(remote): @@ -207,7 +214,7 @@ def _deb_delete_remote(remote): @pytest.fixture -def deb_patch_remote(apt_remote_api): +def deb_patch_remote(apt_remote_api, monitor_task): """Fixture that will partially update a deb remote.""" def _deb_patch_remote(remote, content): @@ -218,7 +225,7 @@ def _deb_patch_remote(remote, content): @pytest.fixture -def deb_put_remote(apt_remote_api): +def deb_put_remote(apt_remote_api, monitor_task): """Fixture that will update a deb remote.""" def _deb_put_remote(remote, content): @@ -229,7 +236,7 @@ def _deb_put_remote(remote, content): @pytest.fixture -def deb_sync_repository(apt_repository_api): +def 
deb_sync_repository(apt_repository_api, monitor_task): """Fixture that synchronizes a given repository with a given remote and returns the monitored task. """ @@ -263,7 +270,7 @@ def deb_signing_script_path(signing_gpg_homedir_path): return f.name -@pytest.fixture +@pytest.fixture(scope="class") def deb_signing_service_factory( cli_client, deb_signing_script_path, diff --git a/setup.py b/setup.py index 60663ad1f..e4f5921c6 100755 --- a/setup.py +++ b/setup.py @@ -10,7 +10,7 @@ setup( name="pulp-deb", - version="2.22.0.dev", + version="3.0.0.dev", description="pulp-deb plugin for the Pulp Project", long_description=long_description, long_description_content_type="text/markdown", diff --git a/template_config.yml b/template_config.yml index 0773ec2fb..54a31bd62 100644 --- a/template_config.yml +++ b/template_config.yml @@ -1,7 +1,7 @@ # This config represents the latest values used when running the plugin-template. Any settings that # were not present before running plugin-template have been added with their default values. -# generated with plugin_template@2021.08.26-214-gf2ffaa4 +# generated with plugin_template@2021.08.26-234-gac245b0 additional_repos: [] api_root: /pulp/ @@ -13,10 +13,9 @@ check_stray_pulpcore_imports: true ci_env: {} ci_trigger: '{pull_request: {branches: [''*'']}}' ci_update_branches: -- '2.18' -- '2.19' - '2.20' - '2.21' +ci_update_docs: false cli_package: pulp-cli-deb cli_repo: https://github.com/pulp/pulp-cli-deb.git core_import_allowed: [] @@ -27,6 +26,7 @@ disabled_redis_runners: [] doc_requirements_from_pulpcore: false docker_fixtures: true docs_test: true +extra_docs_requirements: [] flake8: true flake8_ignore: [] github_org: pulp
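
Finally, a hedged end-to-end sketch of how the new `publish_upstream_release_fields` switch introduced by this change is used over the REST API. The endpoint paths mirror pulp_deb's API layout shown in the serializers above; the base URL and credentials are assumptions for a local development install:

```python
import requests

BASE = "http://localhost:24817/pulp/api/v3"  # assumption: local dev instance
AUTH = ("admin", "password")  # assumption: default dev credentials

# Opt a repository out of publishing the newly synced upstream Release file
# fields (version, origin, label, description); new repos default to True:
repo = requests.post(
    f"{BASE}/repositories/deb/apt/",
    json={"name": "example", "publish_upstream_release_fields": False},
    auth=AUTH,
).json()

# The same flag on a publication overrides the repository setting when set.
# Publication creation is asynchronous; task monitoring is omitted here:
requests.post(
    f"{BASE}/publications/deb/apt/",
    json={
        "repository": repo["pulp_href"],
        "structured": True,
        "publish_upstream_release_fields": True,
    },
    auth=AUTH,
)
```
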