Bump actions/upload-artifact from 3 to 4 (#3) #9

Workflow file for this run

name: CI
on:
merge_group:
pull_request:
push:
# We need to explicitly include tags because otherwise, when adding
# `branches-ignore`, the workflow will only trigger on branches.
tags:
- '*'
branches-ignore:
# Ignore pushes to merge queues.
# We only want to test the merge commit (`merge_group` event); the hashes
# in the push were already tested by the PR checks.
- 'gh-readonly-queue/**'
- 'dependabot/**'
workflow_dispatch:
env:
# Please make sure this version is also included in the `matrix` below; the
# `matrix` section can't use `env`, so the value must be entered manually.
DEFAULT_PYTHON_VERSION: '3.11'
# It would be nice to also define a DEFAULT_UBUNTU_VERSION, but sadly `env`
# can't be used in `runs-on` either.
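# For example, something like `runs-on: ubuntu-${{ env.DEFAULT_UBUNTU_VERSION }}`
# would be rejected because the `env` context is not available in `runs-on`,
# so the Ubuntu version is repeated literally in each job below.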
jobs:
nox:
name: Test with nox
strategy:
fail-fast: false
matrix:
os:
- ubuntu-20.04
python:
- "3.11"
nox-session:
# To speed things up a bit, we use the special ci_checks_max session, which
# reuses the same venv to run multiple linting sessions
- "ci_checks_max"
- "pytest_min"
runs-on: ${{ matrix.os }}
steps:
- name: Print environment (debug)
run: env
- name: Fetch sources
uses: actions/checkout@v4
with:
submodules: true
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python }}
cache: 'pip'
- name: Install required Python packages
run: |
python -m pip install --upgrade pip
python -m pip install -e .[dev-noxfile]
pip freeze
- name: Create nox venv
env:
NOX_SESSION: ${{ matrix.nox-session }}
run: nox --install-only -e "$NOX_SESSION"
- name: Print pip freeze for nox venv (debug)
env:
NOX_SESSION: ${{ matrix.nox-session }}
run: |
. ".nox/$NOX_SESSION/bin/activate"
pip freeze
deactivate
- name: Run nox
env:
NOX_SESSION: ${{ matrix.nox-session }}
run: nox -R -e "$NOX_SESSION"
timeout-minutes: 10
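# To reproduce the job above locally (a sketch, assuming the `dev-noxfile`
# extra from this repository's pyproject.toml):
#   python -m pip install -e .[dev-noxfile]
#   nox -e ci_checks_max   # or: nox -e pytest_min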
# This job runs if all the `nox` matrix jobs ran and succeeded.
# It is only used to have a single job that we can require in branch
# protection rules, so we don't have to update the protection rules each time
# we add or remove a job from the matrix.
nox-all:
# The job name should match the name of the `nox` job.
name: Test with nox
needs: ["nox"]
# We skip this job only if nox was also skipped
if: always() && needs.nox.result != 'skipped'
runs-on: ubuntu-20.04
env:
DEPS_RESULT: ${{ needs.nox.result }}
steps:
- name: Check matrix job result
run: test "$DEPS_RESULT" = "success"
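# Note: `test "$DEPS_RESULT" = "success"` exits non-zero for any other result
# (e.g. "failure" or "cancelled"), so a broken matrix also fails this
# aggregation job.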
nox-cross-arch:
name: Cross-arch tests with nox
if: github.event_name != 'pull_request'
strategy:
fail-fast: false
# Before adding new items to this matrix, make sure that a dockerfile
# exists for the combination of items in the matrix.
# Refer to .github/containers/nox-cross-arch/README.md to learn how to
# add and name new dockerfiles.
matrix:
arch:
- arm64
os:
- ubuntu-20.04
python:
- "3.11"
nox-session:
- "pytest_min"
- "pytest_max"
runs-on: ${{ matrix.os }}
steps:
- name: Fetch sources
uses: actions/checkout@v4
with:
submodules: true
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
platforms: linux/${{ matrix.arch }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
# This is a workaround to prevent the cache from growing indefinitely.
# https://docs.docker.com/build/ci/github-actions/cache/#local-cache
# https://github.com/docker/build-push-action/issues/252
# https://github.com/moby/buildkit/issues/1896
- name: Cache container layers
uses: actions/cache@v3
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-nox-${{ matrix.arch }}-${{ matrix.os }}-${{ matrix.python }}
- name: Build image
uses: docker/build-push-action@v5
with:
context: .github/containers/nox-cross-arch
file: .github/containers/nox-cross-arch/${{ matrix.arch }}-${{ matrix.os }}-python-${{ matrix.python }}.Dockerfile
platforms: linux/${{ matrix.arch }}
tags: localhost/nox-cross-arch:latest
push: false
load: true
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new,mode=max
# Refer to the workaround mentioned above
- name: Move cache
run: |
rm -rf /tmp/.buildx-cache
mv /tmp/.buildx-cache-new /tmp/.buildx-cache
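# A rough sketch of what the two cache steps achieve: buildx writes the
# layers of this build to /tmp/.buildx-cache-new, and replacing the old
# cache directory with it keeps only the layers the latest image actually
# uses, instead of accumulating layers from every past build.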
# Cache pip downloads
- name: Cache pip downloads
uses: actions/cache@v3
with:
path: /tmp/pip-cache
key: nox-${{ matrix.nox-session }}-${{ matrix.arch }}-${{ matrix.os }}-${{ matrix.python }}-${{ hashFiles('pyproject.toml') }}
# This ensures that the docker container has access to the pip cache.
# Changing the user in the docker-run step causes it to fail due to
# incorrect permissions. Setting the ownership of the pip cache to root
# before running is a workaround for this issue.
- name: Set pip cache owners to root for docker
run: if [[ -e /tmp/pip-cache ]]; then sudo chown -R root:root /tmp/pip-cache; fi
- name: Run nox
run: |
docker run \
--rm \
-v "$(pwd)":${{ github.workspace }} \
-v /tmp/pip-cache:/root/.cache/pip \
-w ${{ github.workspace }} \
--net=host \
--platform linux/${{ matrix.arch }} \
localhost/nox-cross-arch:latest \
bash -c "pip install -e .[dev-noxfile]; nox --install-only -e ${{ matrix.nox-session }}; pip freeze; nox -e ${{ matrix.nox-session }}"
timeout-minutes: 30
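# To reproduce the `Run nox` step above locally (a sketch, assuming Docker
# with QEMU/binfmt emulation is available, e.g. via
# `docker run --privileged --rm tonistiigi/binfmt --install arm64`):
#   docker buildx build --platform linux/arm64 --load \
#     -t localhost/nox-cross-arch:latest \
#     -f .github/containers/nox-cross-arch/arm64-ubuntu-20.04-python-3.11.Dockerfile \
#     .github/containers/nox-cross-arch
#   docker run --rm -v "$PWD:/work" -w /work --platform linux/arm64 \
#     localhost/nox-cross-arch:latest \
#     bash -c "pip install -e .[dev-noxfile]; nox -e pytest_min"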
# This ensures that the runner has access to the pip cache.
- name: Reset pip cache ownership
if: always()
run: sudo chown -R $USER:$USER /tmp/pip-cache
# This job runs if all the `nox-cross-arch` matrix jobs ran and succeeded.
# Like the `nox-all` job, its main purpose is to provide a single job that
# can be required in branch protection rules. However, there's a crucial
# difference: the `nox-cross-arch` job is skipped in PRs. Without the
# `nox-cross-arch-all` job, the inner matrix wouldn't be expanded in such
# scenarios, so branch protection rules would keep waiting for checks that
# will never run, essentially causing CI to hang. This behavior is tied to a
# known GitHub issue with matrices when certain jobs are skipped. For more
# details, refer to:
# https://github.com/orgs/community/discussions/9141
nox-cross-arch-all:
# The job name should match the name of the `nox-cross-arch` job.
name: Cross-arch tests with nox
needs: ["nox-cross-arch"]
# We skip this job only if nox-cross-arch was also skipped
if: always() && needs.nox-cross-arch.result != 'skipped'
runs-on: ubuntu-20.04
env:
DEPS_RESULT: ${{ needs.nox-cross-arch.result }}
steps:
- name: Check matrix job result
run: test "$DEPS_RESULT" = "success"
build:
name: Build distribution packages
runs-on: ubuntu-20.04
steps:
- name: Fetch sources
uses: actions/checkout@v4
with:
submodules: true
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
cache: 'pip'
- name: Install required Python packages
run: |
python -m pip install -U pip
python -m pip install -U build
pip freeze
- name: Build the source and binary distribution
run: python -m build
- name: Upload distribution files
uses: actions/upload-artifact@v4
with:
name: dist-packages
path: dist/
if-no-files-found: error
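# After a successful build, dist/ typically contains a source distribution
# (.tar.gz) and a wheel (.whl); `if-no-files-found: error` makes the upload
# fail loudly if the build produced nothing.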
test-installation:
name: Test package installation in different architectures
needs: ["build"]
runs-on: ubuntu-20.04
steps:
- name: Fetch sources
uses: actions/checkout@v4
- name: Download package
uses: actions/download-artifact@v4
with:
name: dist-packages
path: dist
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Test installation
uses: docker/build-push-action@v5
with:
context: .
file: .github/containers/test-installation/Dockerfile
platforms: linux/amd64,linux/arm64
tags: localhost/test-installation
push: false
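# A local equivalent of this check (a sketch, assuming QEMU emulation and the
# dist/ artifacts are already in place):
#   docker buildx build --platform linux/amd64,linux/arm64 \
#     -f .github/containers/test-installation/Dockerfile .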
test-docs:
name: Test documentation website generation
if: github.event_name != 'push'
runs-on: ubuntu-20.04
steps:
- name: Fetch sources
uses: actions/checkout@v4
with:
submodules: true
- name: Set up Git user and e-mail
uses: frequenz-floss/setup-git-user@v2
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
cache: 'pip'
- name: Install build dependencies
run: |
python -m pip install -U pip
python -m pip install .[dev-mkdocs]
pip freeze
- name: Generate the documentation
env:
MIKE_VERSION: gh-${{ github.job }}
run: |
mike deploy $MIKE_VERSION
mike set-default $MIKE_VERSION
- name: Upload site
uses: actions/upload-artifact@v4
with:
name: docs-site
path: site/
if-no-files-found: error
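# To preview the generated documentation locally (a sketch; note that
# `mike deploy` commits to a local `gh-pages` branch, so prefer a throwaway
# clone):
#   python -m pip install .[dev-mkdocs]
#   mike deploy my-local-test
#   mike serve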
publish-docs:
name: Publish documentation website to GitHub pages
needs: ["nox-all", "nox-cross-arch-all", "test-installation"]
if: github.event_name == 'push'
runs-on: ubuntu-20.04
permissions:
contents: write
steps:
- name: Fetch sources
uses: actions/checkout@v4
with:
submodules: true
- name: Set up Git user and e-mail
uses: frequenz-floss/setup-git-user@v2
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
cache: 'pip'
- name: Install build dependencies
run: |
python -m pip install -U pip
python -m pip install .[dev-mkdocs]
pip freeze
- name: Calculate and check version
id: mike-version
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_REPO: ${{ github.repository }}
GIT_REF: ${{ github.ref }}
GIT_SHA: ${{ github.sha }}
run: |
python -m frequenz.repo.config.cli.version.mike.info
- name: Fetch the gh-pages branch
if: steps.mike-version.outputs.version
run: git fetch origin gh-pages --depth=1
- name: Build site
if: steps.mike-version.outputs.version
env:
VERSION: ${{ steps.mike-version.outputs.version }}
TITLE: ${{ steps.mike-version.outputs.title }}
ALIASES: ${{ steps.mike-version.outputs.aliases }}
# This is not ideal: we need to define all these variables here because
# we need to calculate all the repository version information to be able
# to show the correct versions in the documentation when building it.
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_REPO: ${{ github.repository }}
GIT_REF: ${{ github.ref }}
GIT_SHA: ${{ github.sha }}
run: |
mike deploy --update-aliases --title "$TITLE" "$VERSION" $ALIASES
- name: Sort site versions
if: steps.mike-version.outputs.version
run: |
git checkout gh-pages
python -m frequenz.repo.config.cli.version.mike.sort versions.json
git commit -a -m "Sort versions.json"
- name: Publish site
if: steps.mike-version.outputs.version
run: |
git push origin gh-pages
create-github-release:
name: Create GitHub release
needs: ["publish-docs"]
# Create a release only on tag creation
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v')
permissions:
# We need write permissions on contents to create GitHub releases and on
# discussions to create the release announcement in the discussion forums
contents: write
discussions: write
runs-on: ubuntu-20.04
steps:
- name: Download distribution files
uses: actions/download-artifact@v4
with:
name: dist-packages
path: dist
- name: Download RELEASE_NOTES.md
run: |
set -ux
gh api \
-X GET \
-f ref=$REF \
-H "Accept: application/vnd.github.raw" \
"/repos/$REPOSITORY/contents/RELEASE_NOTES.md" \
> RELEASE_NOTES.md
env:
REF: ${{ github.ref }}
REPOSITORY: ${{ github.repository }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Create GitHub release
run: |
set -ux
extra_opts=
if echo "$REF_NAME" | grep -- -; then extra_opts=" --prerelease"; fi
gh release create \
-R "$REPOSITORY" \
--notes-file RELEASE_NOTES.md \
--generate-notes \
$extra_opts \
$REF_NAME \
dist/*
env:
REF_NAME: ${{ github.ref_name }}
REPOSITORY: ${{ github.repository }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
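# For example, a tag name like v1.2.3-rc.1 contains a dash, so the
# `gh release create` above marks it as a prerelease; a plain v1.2.3 is
# published as a regular release.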
publish-to-pypi:
name: Publish packages to PyPI
needs: ["create-github-release"]
runs-on: ubuntu-20.04
permissions:
# For trusted publishing. See:
# https://blog.pypi.org/posts/2023-04-20-introducing-trusted-publishers/
id-token: write
steps:
- name: Download distribution files
uses: actions/download-artifact@v4
with:
name: dist-packages
path: dist
- name: Publish the Python distribution to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
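# With trusted publishing, no PyPI API token needs to be stored as a secret:
# the action exchanges the OIDC token granted by `id-token: write` for a
# short-lived PyPI token when uploading.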