Add a test importing all submodules of scikit-decide (#436) #334

Workflow file for this run

name: 🔨 Build scikit-decide
on:
push:
branches:
- "**"
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
pull_request:
workflow_dispatch:
schedule:
- cron: '45 1 * * 3'
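# i.e. a weekly run every Wednesday at 01:45 UTC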
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
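# a new run on the same branch, tag, or PR cancels any still-running instance of this workflow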
env:
BOOST_DIR: 3rdparty/boost
BOOST_VERSION: "1.85.0"
SKDECIDE_SKIP_DEPS: 1
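# 1 = skip rebuilding vendored C++ dependencies; build jobs reset this to 0 when the deps cache misses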
MAIN_REPO_NAME: "airbus/scikit-decide"
jobs:
trigger:
# store trigger reason
runs-on: ubuntu-latest
outputs:
is_release: ${{ steps.reason.outputs.is_release }}
is_push_on_default_branch: ${{ steps.reason.outputs.is_push_on_default_branch }}
is_schedule: ${{ steps.reason.outputs.is_schedule }}
on_main_repo: ${{ steps.reason.outputs.on_main_repo }}
steps:
- id: reason
run: |
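# step outputs are plain strings, so the booleans computed below arrive in downstream jobs as "true"/"false"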
echo "is_release=${{ startsWith(github.ref, 'refs/tags/v') }}" >> $GITHUB_OUTPUT
echo "is_push_on_default_branch=${{ (github.event_name == 'push') && (github.ref == format('refs/heads/{0}', github.event.repository.default_branch)) }}" >> $GITHUB_OUTPUT
echo "is_schedule=${{ github.event_name == 'schedule' }}" >> $GITHUB_OUTPUT
echo "on_main_repo=${{ github.repository == env.MAIN_REPO_NAME }}" >> $GITHUB_OUTPUT
get-release-version:
needs: trigger
runs-on: ubuntu-latest
outputs:
skdecide-version: ${{ steps.get-version.outputs.skdecide_version }}
tag-name: ${{ steps.get-version.outputs.tag_name }}
steps:
- id: get-version
if: needs.trigger.outputs.is_release == 'true'
run: |
tag_name=${GITHUB_REF/refs\/tags\//} # stripping "refs/tags/"
skdecide_version=${tag_name/v/} # stripping "v"
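# e.g. GITHUB_REF=refs/tags/v1.2.3 gives tag_name=v1.2.3 and skdecide_version=1.2.3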
echo "tag_name=${tag_name}" >> $GITHUB_OUTPUT
echo "skdecide_version=${skdecide_version}" >> $GITHUB_OUTPUT
setup:
runs-on: ubuntu-latest
needs: trigger
outputs:
python_version_test_per_os: ${{ steps.generate-matrix.outputs.python_version_test_per_os }}
python_version_build_per_os: ${{ steps.generate-matrix.outputs.python_version_build_per_os }}
test: ${{ steps.generate-matrix.outputs.test }}
do_macos: ${{ steps.generate-matrix.outputs.do_macos }}
do_ubuntu: ${{ steps.generate-matrix.outputs.do_ubuntu }}
do_windows: ${{ steps.generate-matrix.outputs.do_windows }}
build_doc: ${{ steps.generate-matrix.outputs.build_doc }}
steps:
- uses: actions/setup-python@v5
with:
python-version: "3.9"
- name: Generate Matrix
id: generate-matrix
shell: python3 {0}
run: |
from os import environ
python_version_build = ["3.9", "3.10", "3.11", "3.12"]
python_version_test = ["3.9", "3.12"]
test = ["macos-13", "macos-latest", "ubuntu-latest", "windows-latest"]
build_doc = "true"
oses = ["macos", "ubuntu", "windows"]
test_dict = {os: [k for k in test if k.startswith(os)] for os in oses}
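# commit-message keywords such as "[ci: python-3.12]" or "[ci: macos]" let a regular push narrow the matrix;
# releases, pushes on the default branch, and scheduled runs always use the full matrix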
if "${{ needs.trigger.outputs.is_release == 'true' || needs.trigger.outputs.is_push_on_default_branch == 'true' || needs.trigger.outputs.is_schedule == 'true' }}" == "false":
to_bool = lambda s: s == "true"
python_filter = {
'3.11': to_bool("${{ contains(github.event.head_commit.message, '[ci: python-3.11]') }}"),
'3.12': to_bool("${{ contains(github.event.head_commit.message, '[ci: python-3.12]') }}"),
'3.9': to_bool("${{ contains(github.event.head_commit.message, '[ci: python-3.9]') }}"),
'3.10': to_bool("${{ contains(github.event.head_commit.message, '[ci: python-3.10]') }}"),
}
if any(python_filter.values()):
python_version_build = [v for v in python_version_build if python_filter[v]]
python_version_test = [v for v in python_version_test if python_filter[v]]
os_filter = {
'macos-latest': to_bool("${{ contains(github.event.head_commit.message, '[ci: macos-latest]') }}"),
'macos-13': to_bool("${{ contains(github.event.head_commit.message, '[ci: macos-13]') }}"),
'ubuntu-latest': to_bool("${{ contains(github.event.head_commit.message, '[ci: ubuntu-latest]') }}"),
'windows-latest': to_bool("${{ contains(github.event.head_commit.message, '[ci: windows-latest]') }}"),
}
if set(os_filter.keys()) != set(test):
raise Exception("test and os_filter do not contain the same keys")
if "${{ contains(github.event.head_commit.message, '[ci: windows]') }}" == "true":
os_filter.update({k: True for k in os_filter if k.startswith("windows")})
if "${{ contains(github.event.head_commit.message, '[ci: macos]') }}" == "true":
os_filter.update({k: True for k in os_filter if k.startswith("macos")})
if "${{ contains(github.event.head_commit.message, '[ci: ubuntu]') }}" == "true":
os_filter.update({k: True for k in os_filter if k.startswith("ubuntu")})
# If there is no keyword, proceed as if all were present
if not any(os_filter.values()):
os_filter.update({k: True for k in os_filter})
test = [v for v in test if os_filter[v]]
test_dict = {os: [k for k in test if k.startswith(os)] for os in oses}
if "${{ contains(github.event.head_commit.message, '[ci: skip-doc]') }}" == "true" or len(test_dict["ubuntu"]) == 0:
build_doc = "false"
python_version_build_per_os = {os: python_version_build for os in oses}
python_version_test_per_os = {os: python_version_test for os in oses}
with open(environ["GITHUB_OUTPUT"], "a") as f:
f.write(f"test={test_dict}\n")
f.write(f"build_doc={build_doc}\n")
for os in oses:
f.write(f"do_{os}={'true' if len(test_dict[os]) > 0 else 'false'}\n")
f.write(f"python_version_build_per_os={python_version_build_per_os}\n")
f.write(f"python_version_test_per_os={python_version_test_per_os}\n")
lint-sources:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.9"
- name: install pre-commit
run: python -m pip install pre-commit
- name: get cached pre-commit hooks
uses: actions/cache@v4
with:
path: ~/.cache/pre-commit
key: pre-commit|${{ env.pythonLocation }}|${{ hashFiles('.pre-commit-config.yaml') }}
- name: pre-commit checks
run: pre-commit run --show-diff-on-failure --color=always --all-files
build-windows:
needs: [setup]
if: needs.setup.outputs.do_windows == 'true'
strategy:
matrix:
os: ["windows-latest"]
python-version: ${{ fromJSON(needs.setup.outputs.python_version_build_per_os).windows }}
fail-fast: false
defaults:
run:
shell: bash
runs-on: ${{ matrix.os }}
steps:
- name: Checkout scikit-decide source code
uses: actions/checkout@v4
with:
submodules: true
fetch-depth: 0
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Load cached venv
id: cached-pip-wheels
uses: actions/cache@v4
with:
path: ~/.cache
key: venv-${{ runner.os }}-${{ hashFiles('**/poetry.lock') }}
- name: Restore Boost cache
uses: actions/cache@v4
id: cache-boost
with:
path: ${{env.BOOST_DIR}}
key: BOOST_${{env.BOOST_VERSION}}
- name: Install Boost
if: steps.cache-boost.outputs.cache-hit != 'true'
run: |
mkdir -p $BOOST_DIR
curl --silent --location --output - \
https://boostorg.jfrog.io/artifactory/main/release/$BOOST_VERSION/source/boost_${BOOST_VERSION//./_}.tar.bz2 |\
tar jxf - -C $BOOST_DIR --strip-components=1 boost_${BOOST_VERSION//./_}/boost
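# the tarball is streamed straight into tar; only the boost_<version>/boost header tree is kept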
shell: bash
- name: Restore build dependencies
id: cache-build-dependencies
uses: actions/cache@v4
with:
path: |
skdecide/hub/bin
skdecide/hub/share
skdecide/hub/*.msc
key: ${{ runner.os }}-cache-deps
- name: Update SKDECIDE_SKIP_DEPS
if: steps.cache-build-dependencies.outputs.cache-hit != 'true'
run: echo "SKDECIDE_SKIP_DEPS=0" >> $GITHUB_ENV
- name: Build wheel
run: |
export "Boost_ROOT=$PWD/$BOOST_DIR"
python -m pip install --upgrade pip
pip install build poetry-dynamic-versioning
python -m build --sdist --wheel
- name: Update build cache from wheels
if: steps.cache-build-dependencies.outputs.cache-hit != 'true'
run: 7z x dist/*.whl -y
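# unpacking the freshly built wheel in place refreshes skdecide/hub/{bin,share} before the deps cache is saved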
- name: Upload as build artifacts
uses: actions/upload-artifact@v4
with:
name: wheels-${{ matrix.os }}-${{ matrix.python-version }}
path: dist/*.whl
build-macos:
needs: [trigger, setup]
if: needs.setup.outputs.do_macos == 'true'
strategy:
matrix:
python-version: ${{ fromJSON(needs.setup.outputs.python_version_build_per_os).macos }}
os: [ "macos-latest", "macos-13" ]
arch: [ "arm64", "x86_64" ]
exclude:
- os: macos-13
arch: arm64
- os: macos-latest
arch: x86_64
include:
- arch: x86_64
miniforge_url: "https://github.com/conda-forge/miniforge/releases/download/23.11.0-0/Mambaforge-23.11.0-0-MacOSX-x86_64.sh"
miniforge_sha: "c6ac073b80cedb313561bc11c2e61b0bd102b74df0363ed6c1e90303b322092a"
- arch: arm64
miniforge_url: "https://github.com/conda-forge/miniforge/releases/download/23.11.0-0/Mambaforge-23.11.0-0-MacOSX-arm64.sh"
miniforge_sha: "dd832d8a65a861b5592b2cf1d55f26031f7c1491b30321754443931e7b1e6832"
fail-fast: false
defaults:
run:
shell: bash
runs-on: ${{ matrix.os }}
steps:
- name: Checkout scikit-decide source code
uses: actions/checkout@v4
with:
submodules: true
fetch-depth: 0
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Load cached venv
id: cached-pip-wheels
uses: actions/cache@v4
with:
path: ~/.cache
key: venv-${{ runner.os }}-${{ matrix.arch }}-${{ hashFiles('**/poetry.lock') }}
- name: Restore Boost cache
uses: actions/cache@v4
id: cache-boost
with:
path: ${{env.BOOST_DIR}}
key: BOOST_${{env.BOOST_VERSION}}
- name: Install Boost
if: steps.cache-boost.outputs.cache-hit != 'true'
run: |
mkdir -p $BOOST_DIR
curl --silent --location --output - \
https://boostorg.jfrog.io/artifactory/main/release/$BOOST_VERSION/source/boost_${BOOST_VERSION//./_}.tar.bz2 |\
tar jxf - -C $BOOST_DIR --strip-components=1 boost_${BOOST_VERSION//./_}/boost
shell: bash
- name: Restore build dependencies
id: cache-build-dependencies
uses: actions/cache@v4
with:
path: |
skdecide/hub/bin
skdecide/hub/share
skdecide/hub/*.msc
key: ${{ runner.os }}-${{ matrix.arch }}-cache-deps
- name: Update SKDECIDE_SKIP_DEPS
if: steps.cache-build-dependencies.outputs.cache-hit != 'true'
run: echo "SKDECIDE_SKIP_DEPS=0" >> $GITHUB_ENV
- name: Install and restore ccache
if: needs.trigger.outputs.is_release == 'false'
uses: hendrikmuhs/[email protected]
with:
key: ${{ runner.os }}-py${{ matrix.python-version }}-${{ matrix.arch }}
max-size: 80M
- name: Let cmake use ccache
if: needs.trigger.outputs.is_release == 'false'
run: |
echo "CMAKE_CXX_COMPILER_LAUNCHER=ccache" >> ${GITHUB_ENV}
echo "CMAKE_C_COMPILER_LAUNCHER=ccache" >> ${GITHUB_ENV}
- name: Install conda
run: |
set -ex
# macOS runners do not have conda installed, so we must install it manually
EXPECTED_SHA="${{ matrix.miniforge_sha }}"
MINIFORGE_URL="${{ matrix.miniforge_url }}"
curl -L --retry 10 $MINIFORGE_URL -o miniforge.sh
# Check SHA
file_sha=$(shasum -a 256 miniforge.sh | awk '{print $1}')
if [ "$EXPECTED_SHA" != "$file_sha" ]; then
echo "SHA values did not match!"
exit 1
fi
# Install miniforge
MINIFORGE_PATH=$HOME/miniforge
bash ./miniforge.sh -b -p $MINIFORGE_PATH
echo "$MINIFORGE_PATH/bin" >> $GITHUB_PATH
echo "CONDA_HOME=$MINIFORGE_PATH" >> $GITHUB_ENV
- name: Build wheel
env:
ARCH: ${{ matrix.arch }}
PYTHON_VERSION: ${{ matrix.python-version }}
run: |
if [[ "$ARCH" == arm64 ]]; then
# SciPy requires 12.0 on arm to prevent kernel panics
# https://github.com/scipy/scipy/issues/14688
# We use the same deployment target to match SciPy.
export MACOSX_DEPLOYMENT_TARGET=12.0
OPENMP_URL="https://anaconda.org/conda-forge/llvm-openmp/11.1.0/download/osx-arm64/llvm-openmp-11.1.0-hf3c4609_1.tar.bz2"
else
export MACOSX_DEPLOYMENT_TARGET=10.15
OPENMP_URL="https://anaconda.org/conda-forge/llvm-openmp/11.1.0/download/osx-64/llvm-openmp-11.1.0-hda6cdc1_1.tar.bz2"
fi
PYTHON_VERSION_WO_DOT=$(echo ${PYTHON_VERSION} | sed -e 's/\.//g') # remove "."
MACOSX_DEPLOYMENT_TARGET_WO_DOT=$(echo ${MACOSX_DEPLOYMENT_TARGET} | sed -e 's/\./_/g') # replace "." by "_"
# install appropriate version of openmp
sudo conda create -n build $OPENMP_URL
# make openmp and boost available
export Boost_ROOT=$PWD/$BOOST_DIR
export OpenMP_ROOT=$CONDA_HOME/envs/build
export CPPFLAGS="$CPPFLAGS -Xpreprocessor -fopenmp"
export CFLAGS="$CFLAGS -I$OpenMP_ROOT/include"
export CXXFLAGS="$CXXFLAGS -I$OpenMP_ROOT/include"
export LDFLAGS="$LDFLAGS -Wl,-rpath,$OpenMP_ROOT/lib -L$OpenMP_ROOT/lib -lomp"
# cmake flag to cross-compile the c++
export CMAKE_OSX_ARCHITECTURES=${ARCH}
python -m pip install cibuildwheel
# cibuildwheel flags
export CIBW_BUILD_FRONTEND="build"
export CIBW_ARCHS=${ARCH}
export CIBW_BUILD="cp${PYTHON_VERSION_WO_DOT}-macosx_${ARCH}"
# build wheel
python -m cibuildwheel --output-dir wheelhouse
- name: Update build cache from wheels
if: steps.cache-build-dependencies.outputs.cache-hit != 'true'
run: 7z x wheelhouse/*.whl -y
- name: Upload as build artifacts
uses: actions/upload-artifact@v4
with:
name: wheels-${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.arch }}
path: wheelhouse/*.whl
build-ubuntu:
needs: [trigger, setup]
if: needs.setup.outputs.do_ubuntu == 'true'
strategy:
matrix:
os: ["ubuntu-latest"]
python-version: ${{ fromJSON(needs.setup.outputs.python_version_build_per_os).ubuntu }}
fail-fast: false
defaults:
run:
shell: bash
runs-on: ${{ matrix.os }}
steps:
- name: Checkout scikit-decide source code
uses: actions/checkout@v4
with:
submodules: true
fetch-depth: 0
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Load cached venv
id: cached-pip-wheels
uses: actions/cache@v4
with:
path: ~/.cache
key: venv-${{ runner.os }}-${{ hashFiles('**/poetry.lock') }}
- name: Restore Boost cache
uses: actions/cache@v4
id: cache-boost
with:
path: ${{env.BOOST_DIR}}
key: BOOST_${{env.BOOST_VERSION}}
- name: Install Boost
if: steps.cache-boost.outputs.cache-hit != 'true'
run: |
mkdir -p $BOOST_DIR
curl --silent --location --output - \
https://boostorg.jfrog.io/artifactory/main/release/$BOOST_VERSION/source/boost_${BOOST_VERSION//./_}.tar.bz2 |\
tar jxf - -C $BOOST_DIR --strip-components=1 boost_${BOOST_VERSION//./_}/boost
shell: bash
- name: Restore build dependencies
id: cache-build-dependencies
uses: actions/cache@v4
with:
path: |
skdecide/hub/bin
skdecide/hub/share
skdecide/hub/*.msc
key: ${{ runner.os }}-cache-deps
- name: Update SKDECIDE_SKIP_DEPS
if: steps.cache-build-dependencies.outputs.cache-hit != 'true'
run: echo "SKDECIDE_SKIP_DEPS=0" >> $GITHUB_ENV
- name: Restore docker dev image
id: cache-dev-deps
uses: actions/cache@v4
with:
path: /tmp/docker
key: dev-deps-${{ runner.os }}-${{ hashFiles('scripts/build-skdecide_dev.sh', 'scripts/Dockerfile_x86_64_dev') }}
- name: Restore ccache cache
if: needs.trigger.outputs.is_release == 'false'
id: ccache-restore
uses: actions/cache@v4
with:
path: .ccache
key: ccache-${{ runner.os }}-py${{ matrix.python-version }}-${{ github.run_id }}-${{github.run_number}}
restore-keys: ccache-${{ runner.os }}-py${{ matrix.python-version }}
- name: Build wheels
run: |
# Load skdecide_dev image from cache, or build it if not found
if test -f /tmp/docker/skdecide_dev.tar; then
docker image load -i /tmp/docker/skdecide_dev.tar
else
docker build -f scripts/Dockerfile_x86_64_dev -t skdecide_dev .
mkdir -p /tmp/docker
docker image save -o /tmp/docker/skdecide_dev.tar skdecide_dev
fi
if [ "${{ needs.trigger.outputs.is_release }}" == "false" ]; then
# The existence of .ccache directory triggers ccache use in builds-manylinux-wheels.sh
test -d .ccache || mkdir .ccache
fi
docker build -f scripts/Dockerfile_x86_64 -t skdecide_x86_64 --build-arg PYTHON_VERSION=${{matrix.python-version}} --build-arg SKDECIDE_SKIP_DEPS=${SKDECIDE_SKIP_DEPS} --build-arg BOOST_DIR=${BOOST_DIR} .
# Fetch wheels from Docker
docker run --rm -v $PWD:/mytmp skdecide_x86_64 cp -r /io/dist /mytmp
if [ "${{ needs.trigger.outputs.is_release }}" == "false" ]; then
# Fetch ccache from Docker
docker run --rm -v $PWD:/mytmp skdecide_x86_64 cp -r /io/.ccache /mytmp
fi
- name: Update build cache from wheels
if: steps.cache-build-dependencies.outputs.cache-hit != 'true'
run: 7z x dist/*.whl -y
- name: Upload as build artifacts
uses: actions/upload-artifact@v4
with:
name: wheels-${{ matrix.os }}-${{ matrix.python-version }}
path: dist/*.whl
test-windows:
needs: [build-windows, setup]
strategy:
matrix:
os: ${{ fromJSON(needs.setup.outputs.test).windows }}
python-version: ${{ fromJSON(needs.setup.outputs.python_version_test_per_os).windows }}
compiler: [gnu]
fail-fast: false
runs-on: ${{ matrix.os }}
defaults:
run:
shell: bash
env:
minizinc_config_cmdline: export PATH=$PATH:~/AppData/Local/Programs/MiniZinc
minizinc_cache_path: ~/AppData/Local/Programs/MiniZinc
minizinc_url: https://github.com/MiniZinc/MiniZincIDE/releases/download/2.8.5/MiniZincIDE-2.8.5-bundled-setup-win64.exe
minizinc_downloaded_filepath: minizinc_setup.exe
minizinc_install_cmdline: cmd //c "minizinc_setup.exe /verysilent /currentuser /norestart /suppressmsgboxes /sp"
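# the doubled slash in "cmd //c" keeps Git Bash from rewriting /c as a POSIX path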
steps:
- uses: actions/checkout@v4
with:
submodules: true
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Download artifacts
uses: actions/download-artifact@v4
with:
pattern: wheels-windows*-${{ matrix.python-version }}
merge-multiple: true
path: wheels
- name: Get MiniZinc path to cache
id: get-mzn-cache-path
run: |
echo "path=${{ env.minizinc_cache_path }}" >> $GITHUB_OUTPUT # expands variables
- name: Restore MiniZinc cache
id: cache-minizinc
uses: actions/cache@v4
with:
path: ${{ steps.get-mzn-cache-path.outputs.path }}
key: ${{ env.minizinc_url }}
- name: Download MiniZinc
if: steps.cache-minizinc.outputs.cache-hit != 'true'
run: |
curl -o "${{ env.minizinc_downloaded_filepath }}" -L ${{ env.minizinc_url }}
- name: Install MiniZinc
if: steps.cache-minizinc.outputs.cache-hit != 'true'
run: |
${{ env.minizinc_install_cmdline }}
- name: Test minizinc install
run: |
${{ env.minizinc_config_cmdline }}
minizinc --version
- name: Install scikit-decide and test dependencies
run: |
pip install "ray[rllib]>=2.20"
python_version=${{ matrix.python-version }}
wheelfile=$(ls ./wheels/scikit_decide*-cp${python_version/\./}-*win*.whl)
if [ "$python_version" = "3.12" ]; then
pip install ${wheelfile}[all] pytest "pygame>=2.5" optuna "cffi>=1.17"
else
pip install ${wheelfile}[all] pytest gymnasium[classic-control] optuna
fi
- name: Test with pytest
run: |
# configure minizinc
${{ env.minizinc_config_cmdline }}
# test minizinc
python -c "import minizinc; print(minizinc.default_driver.minizinc_version); minizinc.Solver.lookup('gecode')"
# run pytest
# we split tests using
# - c++ scikit-decide library
# - ortools (scheduling)
# - deep-learning solvers (solvers/python)
# - from others
# to avoid openmp versions conflicts
pytest -v -s tests/*/cpp
pytest -v -s tests/solvers/python --ignore tests/solvers/python/test_optuna_rayrllib.py
pytest -v -s tests/solvers/python/test_optuna_rayrllib.py
pytest -v -s tests/scheduling
pytest -v -s --ignore-glob tests/*/cpp --ignore tests/solvers/python --ignore tests/scheduling
test-macos:
needs: [build-macos, setup]
strategy:
matrix:
os: ${{ fromJSON(needs.setup.outputs.test).macos }}
python-version: ${{ fromJSON(needs.setup.outputs.python_version_test_per_os).macos }}
fail-fast: false
runs-on: ${{ matrix.os }}
env:
minizinc_config_cmdline: export PATH=$PATH:$(pwd)/bin/MiniZincIDE.app/Contents/Resources
minizinc_cache_path: $(pwd)/bin/MiniZincIDE.app
minizinc_url: https://github.com/MiniZinc/MiniZincIDE/releases/download/2.8.5/MiniZincIDE-2.8.5-bundled.dmg
minizinc_downloaded_filepath: bin/minizinc.dmg
minizinc_install_cmdline: sudo hdiutil attach bin/minizinc.dmg; sudo cp -R /Volumes/MiniZinc*/MiniZincIDE.app bin/.
steps:
- uses: actions/checkout@v4
- name: Install needed brew dependencies
run: brew install libomp eccodes
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Download artifacts
uses: actions/download-artifact@v4
with:
pattern: wheels-macos*-${{ matrix.python-version }}*
merge-multiple: true
path: wheels
- name: Create bin/
run: mkdir -p bin
- name: Get MiniZinc path to cache
id: get-mzn-cache-path
run: |
echo "path=${{ env.minizinc_cache_path }}" >> $GITHUB_OUTPUT # expands variables
- name: Restore MiniZinc cache
id: cache-minizinc
uses: actions/cache@v4
with:
path: ${{ steps.get-mzn-cache-path.outputs.path }}
key: ${{ env.minizinc_url }}
- name: Download MiniZinc
if: steps.cache-minizinc.outputs.cache-hit != 'true'
run: |
curl -o "${{ env.minizinc_downloaded_filepath }}" -L ${{ env.minizinc_url }}
- name: Install MiniZinc
if: steps.cache-minizinc.outputs.cache-hit != 'true'
run: |
${{ env.minizinc_install_cmdline }}
- name: Test minizinc install
run: |
${{ env.minizinc_config_cmdline }}
minizinc --version
- name: Install prerelease version of pymip (only for macos arm64)
if: matrix.os == 'macos-latest'
run: |
python -m pip install -U pip
pip install mip==1.16rc0
- name: Install scikit-decide and test dependencies
run: |
python_version=${{ matrix.python-version }}
arch=$(uname -m)
wheelfile=$(ls ./wheels/scikit_decide*-cp${python_version/\./}-*macos*${arch}.whl)
if [ "$python_version" = "3.12" ]; then
pip install ${wheelfile}[all] pytest "pygame>=2.5" optuna "cffi>=1.17" "git+https://github.com/pyrddlgym-project/pyRDDLGym-gurobi"
else
pip install ${wheelfile}[all] pytest gymnasium[classic-control] optuna "git+https://github.com/pyrddlgym-project/pyRDDLGym-gurobi"
fi
- name: Test with pytest
run: |
# configure minizinc
${{ env.minizinc_config_cmdline }}
# test minizinc
python -c "import minizinc; print(minizinc.default_driver.minizinc_version); minizinc.Solver.lookup('gecode')"
# run pytest
# we split tests using
# - c++ scikit-decide library
# - ortools (scheduling)
# - deep-learning solvers (solvers/python)
# - from others
# to avoid openmp versions conflicts
pytest -v -s tests/*/cpp
pytest -v -s tests/solvers/python --ignore tests/solvers/python/test_optuna_rayrllib.py --ignore tests/solvers/python/test_pyrddlgym_solvers.py
pytest -v -s tests/solvers/python/test_optuna_rayrllib.py
pytest -v -s tests/scheduling
pytest -v -s --ignore-glob tests/*/cpp --ignore tests/solvers/python --ignore tests/scheduling --ignore tests/domains/python/test_pyrddlgym_domains.py --ignore tests/solvers/python/test_pyrddlgym_solvers.py
pytest -v -s tests/domains/python/test_pyrddlgym_domains.py tests/solvers/python/test_pyrddlgym_solvers.py
test-ubuntu:
needs: [build-ubuntu, setup]
strategy:
matrix:
os: ${{ fromJSON(needs.setup.outputs.test).ubuntu }}
python-version: ${{ fromJSON(needs.setup.outputs.python_version_test_per_os).ubuntu }}
fail-fast: false
runs-on: ${{ matrix.os }}
env:
minizinc_config_cmdline: export PATH=$PATH:$(pwd)/bin/squashfs-root/usr/bin; export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$(pwd)/bin/squashfs-root/usr/lib
minizinc_cache_path: $(pwd)/bin/squashfs-root
minizinc_url: https://github.com/MiniZinc/MiniZincIDE/releases/download/2.8.5/MiniZincIDE-2.8.5-x86_64.AppImage
minizinc_downloaded_filepath: bin/minizinc.AppImage
minizinc_install_cmdline: cd bin; sudo chmod +x minizinc.AppImage; sudo ./minizinc.AppImage --appimage-extract; cd ..
minizinc_prerequisites_cmdline: sudo apt update && sudo apt install libegl1 -y
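# --appimage-extract unpacks the AppImage into squashfs-root/, avoiding the FUSE requirement of running it directly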
steps:
- uses: actions/checkout@v4
- name: Install needed apt dependencies
uses: awalsh128/cache-apt-pkgs-action@latest
with:
packages: libeccodes-dev
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Download artifacts
uses: actions/download-artifact@v4
with:
pattern: wheels-ubuntu*-${{ matrix.python-version }}
merge-multiple: true
path: wheels
- name: Create bin/
run: mkdir -p bin
- name: Minizinc prerequisites
run: |
${{ env.minizinc_prerequisites_cmdline }}
- name: Get MiniZinc path to cache
id: get-mzn-cache-path
run: |
echo "path=${{ env.minizinc_cache_path }}" >> $GITHUB_OUTPUT # expands variables
- name: Restore MiniZinc cache
id: cache-minizinc
uses: actions/cache@v4
with:
path: ${{ steps.get-mzn-cache-path.outputs.path }}
key: ${{ env.minizinc_url }}
- name: Download MiniZinc
if: steps.cache-minizinc.outputs.cache-hit != 'true'
run: |
curl -o "${{ env.minizinc_downloaded_filepath }}" -L ${{ env.minizinc_url }}
- name: Install MiniZinc
if: steps.cache-minizinc.outputs.cache-hit != 'true'
run: |
${{ env.minizinc_install_cmdline }}
- name: Test minizinc install
run: |
${{ env.minizinc_config_cmdline }}
minizinc --version
- name: Install scikit-decide
run: |
python_version=${{ matrix.python-version }}
wheelfile=$(ls ./wheels/scikit_decide*-cp${python_version/\./}-*manylinux*.whl)
if [ "$python_version" = "3.12" ]; then
pip install ${wheelfile}[all] pytest "pygame>=2.5" "cffi>=1.17" docopt commonmark optuna "git+https://github.com/pyrddlgym-project/pyRDDLGym-gurobi"
else
pip install ${wheelfile}[all] pytest gymnasium[classic-control] docopt commonmark optuna "git+https://github.com/pyrddlgym-project/pyRDDLGym-gurobi"
fi
- name: Test with pytest
run: |
# configure minizinc
${{ env.minizinc_config_cmdline }}
# test minizinc
python -c "import minizinc; print(minizinc.default_driver.minizinc_version); minizinc.Solver.lookup('gecode')"
# run pytest
# we split tests using
# - c++ scikit-decide library
# - ortools (scheduling)
# - deep-learning solvers (solvers/python)
# - from others
# to avoid openmp versions conflicts
pytest -v -s tests/*/cpp
pytest -v -s tests/solvers/python --ignore tests/solvers/python/test_optuna_rayrllib.py
pytest -v -s tests/solvers/python/test_optuna_rayrllib.py
pytest -v -s tests/scheduling
pytest -v -s --ignore-glob tests/*/cpp --ignore tests/solvers/python --ignore tests/scheduling
- name: Test python block codes from guide
run: |
# configure minizinc
${{ env.minizinc_config_cmdline }}
# extract block codes
python scripts/md2py.py docs/guide/README.md tests/test_guide.py
# test it
python tests/test_guide.py
upload-release:
needs: [ get-release-version, trigger, test-ubuntu, test-macos, test-windows ]
if: |
(needs.trigger.outputs.is_release == 'true')
|| ((needs.trigger.outputs.is_push_on_default_branch == 'true') && (needs.trigger.outputs.on_main_repo == 'true'))
|| ((needs.trigger.outputs.is_schedule == 'true') && (needs.trigger.outputs.on_main_repo == 'true'))
runs-on: ubuntu-latest
steps:
- uses: actions/download-artifact@v4
with:
pattern: wheels-*
merge-multiple: true
path: dist/
- name: Upload artifacts 📦 to release
uses: ncipollo/release-action@v1
if: needs.trigger.outputs.is_release == 'true'
with:
artifacts: dist/*.whl
tag: ${{ needs.get-release-version.outputs.tag-name }}
allowUpdates: true
generateReleaseNotes: true
- if: needs.trigger.outputs.is_release == 'false'
run: zip -r dist.zip dist/
- uses: actions/github-script@v7
if: needs.trigger.outputs.is_release == 'false'
id: asset
with:
script: |
const fs = require('fs');
// Get the ref for master
const master_sha = '${{ github.sha }}';
console.log(`master reference ${master_sha}`);
// Retrieve ref for tag `nightly`
let ref_nightly = null;
try {
ref_nightly = await github.rest.git.getRef({
owner: context.repo.owner,
repo: context.repo.repo,
ref: 'tags/nightly',
});
if (ref_nightly.data.object.sha === master_sha) {
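// nightly already points at the current commit: nothing new to release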
return '';
}
} catch (err) {
// The tag does not exist so let's create it
ref_nightly = await github.rest.git.createRef({
owner: context.repo.owner,
repo: context.repo.repo,
ref: 'refs/tags/nightly',
sha: master_sha,
});
}
// Call the GitHub API to get a release by tag
let release = null;
try {
release = await github.rest.repos.getReleaseByTag({
owner: context.repo.owner,
repo: context.repo.repo,
tag: 'nightly',
});
console.log(`Found release ${release.data.tag_name} ${release.data.draft} ${release.data.prerelease}`);
} catch (err) {
console.log(`Release 'nightly' not found`);
// If the release doesn't exist, create it
release = await github.rest.repos.createRelease({
owner: context.repo.owner,
repo: context.repo.repo,
tag_name: 'nightly',
name: 'nightly',
body: 'Nightly release crafted with ♥️ somewhere on 🌎',
draft: false,
prerelease: true,
});
console.log(`Created release ${release.data.tag_name} ${release.data.draft} ${release.data.prerelease}`);
}
console.log(`Release does exist with tag ${release.data.tag_name} [${release.data.draft} ${release.data.prerelease}]`);
// At this stage both tag & release exist
// Update nightly tag
await github.rest.git.updateRef({
owner: context.repo.owner,
repo: context.repo.repo,
ref: 'tags/nightly',
sha: master_sha,
force: true,
});
console.log(`Updated tag to sha ${master_sha}`);
// Update the release
await github.rest.repos.updateRelease({
owner: context.repo.owner,
repo: context.repo.repo,
release_id: release.data.id,
tag_name: 'nightly',
name: 'nightly',
body: 'Nightly release crafted with ♥️ somewhere on 🌎',
draft: false,
prerelease: true,
});
console.log(`Updated ${release.data.tag_name} nightly release ${release.data.draft} ${release.data.prerelease}`);
// Get all tags and keep the newest one starting with "v"
let newest_tag = { name: 'v0.0.0' };
const tags = await github.rest.repos.listTags({
owner: context.repo.owner,
repo: context.repo.repo,
});
// Keep latest tag
for (const tag of tags.data) {
if (tag.name.startsWith('v')) {
if (tag.name.localeCompare(newest_tag.name, undefined, { numeric: true}) > 0) {
newest_tag = tag;
}
}
}
console.log(`Previous release has tag ${newest_tag.name} → ${newest_tag.commit.sha}`);
// Count all commits between HEAD and newest tag
// Limited to 250 commits
const distance = await github.rest.repos.compareCommitsWithBasehead({
owner: context.repo.owner,
repo: context.repo.repo,
basehead: `${newest_tag.commit.sha}...${master_sha}`,
}).then(d => d.data.total_commits);
// Rename the zip built from dist/ so its name embeds the commit distance and short sha
let release_name = `nightly_${distance}_${master_sha.substring(0,8)}.zip`;
console.log(`Release file name: ${release_name}`);
fs.renameSync('dist.zip', release_name);
// Upload the zip file to GitHub
const uploadedAsset = await github.rest.repos.uploadReleaseAsset({
owner: context.repo.owner,
repo: context.repo.repo,
release_id: release.data.id,
name: release_name,
data: fs.readFileSync(release_name),
headers: {
'content-type': 'application/zip',
},
});
return uploadedAsset.data.browser_download_url;
result-encoding: string
update-notebooks-for-colab-and-binder:
runs-on: ubuntu-latest
needs: [ trigger, get-release-version, build-ubuntu ]
if: needs.trigger.outputs.is_release == 'true'
outputs:
notebooks-branch: ${{ steps.write-output.outputs.notebooks_branch }}
binder-full-ref: ${{ steps.write-output.outputs.binder_full_ref }}
steps:
- uses: actions/checkout@v4
- name: replace scikit-decide version to install in colab notebooks
run: |
version=${{ needs.get-release-version.outputs.skdecide-version }}
old_pip_spec_pattern="\(skdecide_pip_spec.*\)scikit-decide\[all\]"
new_pip_spec_pattern="\1scikit-decide[all]==${version}"
if [ "${{ github.repository != env.MAIN_REPO_NAME && secrets.TEST_PYPI_API_TOKEN != '' }}" == "true" ]; then
# install from TestPypi if on a fork
new_pip_spec_pattern="${new_pip_spec_pattern} --extra-index-url https://test.pypi.org/simple/"
fi
old_using_nightly_pattern="\(using_nightly_version\s*=\s*\)True"
new_using_nightly_pattern="using_nightly_version = False"
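# illustration (hypothetical notebook line): skdecide_pip_spec = "scikit-decide[all]" becomes skdecide_pip_spec = "scikit-decide[all]==<version>"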
shopt -s globstar # enable **
sed -i \
-e "s|${old_pip_spec_pattern}|${new_pip_spec_pattern}|g" \
-e "s|${old_using_nightly_pattern}|${new_using_nightly_pattern}|g" \
notebooks/**/*.ipynb
- name: replace scikit-decide version to install in binder environment
run: |
version=${{ needs.get-release-version.outputs.skdecide-version }}
# environment.yml
linefilter="/^name/!"
old_pip_spec_pattern="\(\s*\)-.*scikit-decide.*$"
new_pip_spec_pattern="\1- scikit-decide[all]==$version"
if [ "${{ github.repository != env.MAIN_REPO_NAME && secrets.TEST_PYPI_API_TOKEN != '' }}" == "true" ]; then
# install from TestPypi if on a fork
new_pip_spec_pattern="${new_pip_spec_pattern}\n\1- --extra-index-url https://test.pypi.org/simple/"
fi
sed_command="${linefilter}s|${old_pip_spec_pattern}|${new_pip_spec_pattern}|"
echo sed -i -e ${sed_command} binder/environment.yml
sed -i -e "${sed_command}" binder/environment.yml
# postBuild
old_using_nightly_pattern="using_nightly_version=true"
new_using_nightly_pattern="using_nightly_version=false"
sed_command="s|${old_using_nightly_pattern}|${new_using_nightly_pattern}|"
sed -i -e "${sed_command}" binder/postBuild
- name: push modifications on a dedicated tag
id: push-tuto-release-tag
run: |
current_tag_name=${{ needs.get-release-version.outputs.tag-name }}
new_tag_name="notebooks-${current_tag_name}"
echo ${new_tag_name}
git config user.name "Actions"
git config user.email "[email protected]"
git commit binder notebooks -m "Install appropriate version of scikit-decide"
git tag ${new_tag_name} -m "Use release ${current_tag_name} in binder and colab"
git push origin ${new_tag_name}
# store new tag name as notebooks branch
echo "notebooks_branch=${new_tag_name}" >> $GITHUB_ENV
echo "binder_full_ref=${{ github.repository }}/${new_tag_name}" >> $GITHUB_ENV
- name: write new notebooks branch in job outputs
id: write-output
run: |
echo "notebooks_branch=${notebooks_branch}" >> $GITHUB_OUTPUT
echo "binder_full_ref=${binder_full_ref}" >> $GITHUB_OUTPUT
build-doc:
needs: [ build-ubuntu, setup, update-notebooks-for-colab-and-binder, get-release-version ]
# if: always()
# -> trigger even if one needed job was skipped (namely update-notebooks-for-colab-and-binder)
# -> needed jobs' successes must be checked explicitly
if: |
always()
&& (needs.setup.outputs.build_doc == 'true')
&& (needs.build-ubuntu.result == 'success')
uses: ./.github/workflows/build-doc.yml
with:
notebooks-branch: ${{ needs.update-notebooks-for-colab-and-binder.outputs.notebooks-branch }}
doc-version: ${{ needs.get-release-version.outputs.skdecide-version }}
deploy:
needs: [ trigger, test-ubuntu, test-macos, test-windows ]
if: needs.trigger.outputs.is_release == 'true'
runs-on: ubuntu-latest
steps:
- name: Download artifact
uses: actions/download-artifact@v4
with:
pattern: wheels-*
merge-multiple: true
path: wheels
- name: Publish distribution 📦 to PyPI
env:
PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
if: needs.trigger.outputs.on_main_repo == 'true' && env.PYPI_API_TOKEN != ''
uses: pypa/gh-action-pypi-publish@release/v1
with:
password: ${{ secrets.PYPI_API_TOKEN }}
packages_dir: wheels/
- name: Publish distribution 📦 to Test PyPI
env:
TEST_PYPI_API_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }}
if: needs.trigger.outputs.on_main_repo == 'false' && env.TEST_PYPI_API_TOKEN != ''
uses: pypa/gh-action-pypi-publish@release/v1
with:
password: ${{ secrets.TEST_PYPI_API_TOKEN }}
packages_dir: wheels/
repository_url: https://test.pypi.org/legacy/
deploy-doc:
needs: [trigger, get-release-version, update-notebooks-for-colab-and-binder, build-doc, test-windows, test-macos, test-ubuntu, upload-release, deploy]
# if: always()
# -> trigger even if one needed job was skipped (namely upload-release or deploy)
# -> needed jobs' successes must be checked explicitly
if: |
always()
&& (needs.build-doc.result == 'success')
&& (needs.test-windows.result == 'success')
&& (needs.test-macos.result == 'success')
&& (needs.test-ubuntu.result == 'success')
&& (
(
(needs.trigger.outputs.is_push_on_default_branch == 'true')
&& (
(needs.upload-release.result == 'success')
|| (needs.trigger.outputs.on_main_repo == 'false')
)
)
|| (
(needs.trigger.outputs.is_release == 'true')
&& (needs.deploy.result == 'success')
)
)
uses: ./.github/workflows/deploy-doc.yml
with:
binder-env-fullref: ${{ needs.update-notebooks-for-colab-and-binder.outputs.binder-full-ref }}
doc-clean: ${{ needs.trigger.outputs.is_release == 'false' }}
doc-version: ${{ needs.get-release-version.outputs.skdecide-version }}