ENH: Fix caching-related warnings in GHA build-test-publish
CI
#97
Workflow file for this run
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
name: CI | |
on: | |
push: | |
branches: | |
- master | |
- main | |
- maint/* | |
tags: | |
- '*' | |
pull_request: | |
branches: | |
- master | |
- main | |
- maint/* | |
schedule: | |
# 9am EST / 10am EDT Mondays | |
- cron: 0 14 * * 0 | |
# Allow job to be triggered manually from GitHub interface | |
workflow_dispatch: | |
# Force pytest to use color | |
env: | |
FORCE_COLOR: true | |
concurrency: | |
group: ${{ github.workflow }}-${{ github.ref }} | |
cancel-in-progress: true | |
permissions: | |
contents: read | |
defaults: | |
run: | |
shell: bash -el {0} | |
jobs: | |
build-package: | |
name: Build & inspect package | |
runs-on: ubuntu-latest | |
permissions: | |
attestations: write | |
id-token: write | |
steps: | |
- uses: actions/checkout@v4 | |
with: | |
fetch-depth: 0 | |
- uses: hynek/build-and-inspect-python-package@v2 | |
test: | |
if: "!contains(github.event.head_commit.message, '[skip ci]' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name != 'nipreps/sdcflows'))" | |
runs-on: ubuntu-latest | |
env: | |
TEST_DATA_HOME: /home/runner/sdcflows-tests | |
FSLOUTPUTTYPE: NIFTI_GZ | |
FSLMULTIFILEQUIT: TRUE | |
AFNI_HOME: /opt/afni | |
AFNI_MODELPATH: /opt/afni/models | |
AFNI_IMSAVE_WARNINGS: NO | |
AFNI_TTATLAS_DATASET: /opt/afni/atlases | |
AFNI_PLUGINPATH: /opt/afni/plugins | |
MARKS: ${{ matrix.marks }} | |
DEPENDS: ${{ matrix.dependencies }} | |
strategy: | |
fail-fast: false | |
matrix: | |
python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] | |
dependencies: ["latest", "pre"] | |
marks: ["fast"] | |
include: | |
- python-version: "3.9" | |
dependencies: "min" | |
marks: "fast" | |
- python-version: "3.9" | |
dependencies: "latest" | |
marks: "slow" | |
- python-version: "3.12" | |
dependencies: "latest" | |
marks: "veryslow" | |
exclude: | |
- python-version: "3.9" | |
dependencies: "pre" | |
- python-version: "3.10" | |
dependencies: "pre" | |
steps: | |
- uses: actions/checkout@v4 | |
- uses: actions/cache@v4 | |
with: | |
path: ${{ runner.temp }}/cache-linux | |
key: apt-cache-v3 | |
restore-keys: | | |
apt-cache-v3 | |
- name: Install dependencies | |
run: | | |
sudo apt-get update | |
sudo apt-get install -y --no-install-recommends \ | |
curl bzip2 ca-certificates \ | |
tcsh gsl-bin netpbm \ | |
libjpeg62 xvfb xterm \ | |
libglu1-mesa-dev libglw1-mesa \ | |
libxm4 build-essential | |
- name: Restore cache for AFNI | |
id: cache-afni | |
uses: actions/cache@v4 | |
with: | |
path: /opt/afni | |
key: afni-v1-${{ matrix.python-version }}-${{ matrix.dependencies }}-${{ matrix.marks }} | |
restore-keys: | | |
afni-v1- | |
- name: Install AFNI | |
if: steps.cache-afni.outputs.cache-hit != 'true' | |
run: | | |
if [[ ! -d "${AFNI_HOME}" ]]; then | |
curl -O https://afni.nimh.nih.gov/pub/dist/bin/misc/@update.afni.binaries && \ | |
tcsh @update.afni.binaries -package linux_ubuntu_16_64 -bindir ${AFNI_HOME} | |
fi | |
ls -l ${AFNI_HOME} | |
echo "PATH=${AFNI_HOME}:$PATH" | tee -a $GITHUB_ENV | |
- name: Git settings (pacify DataLad) | |
run: | | |
git config --global user.name 'NiPreps Bot' | |
git config --global user.email '[email protected]' | |
- name: Install the latest version of uv | |
uses: astral-sh/setup-uv@v4 | |
- name: Set up Python ${{ matrix.python-version }} | |
uses: conda-incubator/setup-miniconda@v3 | |
with: | |
auto-update-conda: true | |
auto-activate-base: true | |
python-version: ${{ matrix.python-version }} | |
channels: https://fsl.fmrib.ox.ac.uk/fsldownloads/fslconda/public/,conda-forge | |
- uses: actions/cache@v4 | |
id: conda | |
env: | |
CACHE_NUM: v5 | |
with: | |
path: | | |
~/conda_pkgs_dir | |
/home/runner/.cache/pip | |
key: python-${{ matrix.python-version }}-${{ env.CACHE_NUM }} | |
- name: Install DataLad | |
run: | | |
conda install git-annex=*=alldep* | |
uv tool install datalad --with=datalad-next --with=datalad-osf | |
uv tool install datalad-osf --with=datalad-next | |
- name: Install fsl and ANTs | |
run: | | |
conda install fsl-fugue fsl-topup ants | |
- uses: actions/cache@v4 | |
with: | |
path: ~/.cache/templateflow | |
key: tf-cache-v1 | |
restore-keys: | | |
tf-cache- | |
- name: Get TemplateFlow's required objects | |
run: | | |
uv run tools/cache_templateflow.py | |
- uses: actions/cache@v4 | |
with: | |
path: ${{ env.TEST_DATA_HOME }} | |
key: data-cache-v2 | |
restore-keys: | | |
data-cache- | |
- name: Install test data | |
run: | | |
mkdir -p ${{ env.TEST_DATA_HOME }} | |
cd ${{ env.TEST_DATA_HOME }} | |
# ds001600 | |
datalad install -r https://github.com/nipreps-data/ds001600.git | |
datalad update -r --merge -d ds001600/ | |
datalad get -r -J 2 -d ds001600/ ds001600/sub-1/ | |
# HCP/sub-101006 | |
datalad install -r https://github.com/nipreps-data/HCP101006.git | |
datalad update -r --merge -d HCP101006/ | |
datalad get -r -J 2 -d HCP101006 HCP101006/* | |
# ds001771 | |
datalad install -r https://github.com/nipreps-data/ds001771.git | |
datalad update -r --merge -d ds001771/ | |
datalad get -r -J 2 -d ds001771/ ds001771/sub-36/* | |
datalad get -r -J 2 -d ds001771/derivatives ds001771/derivatives/openneuro/sub-36/* | |
# ds000054 | |
datalad install -r https://github.com/nipreps-data/ds000054.git | |
datalad update --merge -d ds000054/ | |
datalad get -r -d ds000054/ ds000054/sub-100185/* | |
datalad get -r -J 2 -d ds000054/ ds000054/derivatives/smriprep-0.6/sub-100185/anat/ | |
# ds000206 | |
datalad install -r https://github.com/nipreps-data/ds000206.git | |
datalad update -r --merge -d ds000206/ | |
datalad get -r -J 2 -d ds000206/ ds000206/sub-05/ | |
# Brain extraction tests | |
datalad install -r https://gin.g-node.org/nipreps-data/brain-extraction-tests | |
datalad update --merge -d brain-extraction-tests/ | |
datalad get -r -J 2 -d brain-extraction-tests brain-extraction-tests/* | |
# HCPH pilot | |
datalad install -r https://github.com/nipreps-data/hcph-pilot_fieldmaps.git | |
datalad update -r --merge -d hcph-pilot_fieldmaps/ | |
datalad get -r -J 2 -d hcph-pilot_fieldmaps/ hcph-pilot_fieldmaps/* | |
- name: Set FreeSurfer variables | |
run: | | |
echo "FREESURFER_HOME=$HOME/.cache/freesurfer" >> $GITHUB_ENV | |
echo "FS_LICENSE=$HOME/.cache/freesurfer/license.txt" >> $GITHUB_ENV | |
- name: Install FreeSurfer's mri_robust_template | |
env: | |
MRI_ROBUST_TEMPLATE: sx2n7/providers/osfstorage/5e825301d0e35400ebb481f2 | |
run: | | |
curl https://files.osf.io/v1/resources/$MRI_ROBUST_TEMPLATE?direct > mri_robust_template | |
sudo install mri_robust_template /usr/local/bin | |
mkdir -p $( dirname $FS_LICENSE ) | |
echo "b2VzdGViYW5Ac3RhbmZvcmQuZWR1CjMwNzU2CiAqQ1MzYkJ5VXMxdTVNCiBGU2kvUGJsejJxR1V3Cg==" | base64 -d > $FS_LICENSE | |
- name: Install tox | |
run: | | |
uv tool install tox --with=tox-uv --with=tox-gh-actions | |
- name: Show tox config | |
run: tox c | |
- name: Run tox | |
run: tox -v --exit-and-dump-after 1200 | |
- uses: codecov/codecov-action@v5 | |
with: | |
token: ${{ secrets.CODECOV_TOKEN }} | |
if: ${{ always() }} | |
publish: | |
name: Publish released package to pypi.org | |
environment: release-pypi | |
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') | |
runs-on: ubuntu-latest | |
needs: [build-package, test] | |
permissions: | |
attestations: write | |
id-token: write | |
steps: | |
- name: Download packages built by build-and-inspect-python-package | |
uses: actions/download-artifact@v4 | |
with: | |
name: Packages | |
path: dist | |
- name: Upload package to PyPI | |
uses: pypa/gh-action-pypi-publish@release/v1 |