Extend pre-commit hooks with codespell #3741
Workflow file for this run

name: Conda package
on:
push:
branches:
- master
pull_request:
permissions: read-all
env:
PACKAGE_NAME: dpnp
MODULE_NAME: dpnp
CHANNELS: '-c dppy/label/dev -c intel -c conda-forge --override-channels'
CONDA_BUILD_VERSION: '24.1.2'
CONDA_INDEX_VERSION: '0.4.0'
TEST_ENV_NAME: 'test'
# TODO: add test_arraymanipulation.py back to the scope once the crash on Windows is fixed
TEST_SCOPE: >-
test_arraycreation.py
test_amin_amax.py
test_dparray.py
test_copy.py
test_fft.py
test_linalg.py
test_logic.py
test_manipulation.py
test_mathematical.py
test_product.py
test_random_state.py
test_sort.py
test_special.py
test_statistics.py
test_sycl_queue.py
test_umath.py
test_usm_type.py
third_party/cupy/core_tests
third_party/cupy/linalg_tests/test_decomposition.py
third_party/cupy/linalg_tests/test_norms.py
third_party/cupy/linalg_tests/test_product.py
third_party/cupy/linalg_tests/test_solve.py
third_party/cupy/logic_tests/test_comparison.py
third_party/cupy/logic_tests/test_truth.py
third_party/cupy/manipulation_tests/test_basic.py
third_party/cupy/manipulation_tests/test_dims.py
third_party/cupy/manipulation_tests/test_join.py
third_party/cupy/manipulation_tests/test_rearrange.py
third_party/cupy/manipulation_tests/test_transpose.py
third_party/cupy/math_tests
third_party/cupy/sorting_tests/test_sort.py
third_party/cupy/sorting_tests/test_count.py
third_party/cupy/statistics_tests/test_meanvar.py
VER_JSON_NAME: 'version.json'
VER_SCRIPT1: "import json; f = open('version.json', 'r'); j = json.load(f); f.close(); "
VER_SCRIPT2: "d = j['dpnp'][0]; print('='.join((d[s] for s in ('version', 'build'))))"
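# Note: VER_SCRIPT1 and VER_SCRIPT2 are concatenated and executed with `python -c` in the
# test jobs below. They parse the JSON emitted by `conda search --info --json` (an object
# keyed by package name whose value is a list of package records) and print
# '<version>=<build>' for the first dpnp record, so the freshly built package can be
# pinned exactly as dpnp=<version>=<build> at install time.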
jobs:
build:
name: Build ['${{ matrix.os }}', python='${{ matrix.python }}']
strategy:
matrix:
python: ['3.9', '3.10', '3.11']
os: [ubuntu-20.04, windows-latest]
permissions:
# Needed to cancel any previous runs that are not completed for a given workflow
actions: write
runs-on: ${{ matrix.os }}
defaults:
run:
shell: ${{ matrix.os == 'windows-latest' && 'cmd /C CALL {0}' || 'bash -l {0}' }}
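# Note: GitHub expressions have no ternary operator; the `cond && 'a' || 'b'` pattern
# above selects the cmd shell on Windows runners and a login bash shell elsewhere.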
continue-on-error: false
steps:
- name: Cancel Previous Runs
uses: styfle/cancel-workflow-action@85880fa0301c86cca9da44039ee3bb12d3bedbfa # 0.12.1
with:
access_token: ${{ github.token }}
- name: Checkout DPNP repo
uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3
with:
fetch-depth: 0
- name: Setup miniconda
uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 # v3.0.3
with:
auto-update-conda: true
python-version: ${{ matrix.python }}
miniconda-version: 'latest'
activate-environment: 'build'
- if: matrix.os == 'ubuntu-20.04'
name: Store conda paths as envs on Linux
run: echo "CONDA_BLD=$CONDA_PREFIX/conda-bld/linux-64/" >> $GITHUB_ENV
- if: matrix.os == 'windows-latest'
name: Store conda paths as envs on Win
run: |
@echo on
(echo CONDA_BLD=%CONDA_PREFIX%\conda-bld\win-64\) >> %GITHUB_ENV%
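# Note: appending `NAME=value` lines to the file referenced by GITHUB_ENV exports
# CONDA_BLD as an environment variable for all subsequent steps of the job, on both
# the bash (Linux) and cmd (Windows) variants of this step.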
- name: Install conda-build
run: conda install conda-build=${{ env.CONDA_BUILD_VERSION}}
- name: Cache conda packages
uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
env:
CACHE_NUMBER: 1 # Increase to reset cache
with:
path: ${{ env.CONDA_PKGS_DIR }}
key:
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('**/meta.yaml') }}
restore-keys: |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-
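# Note: the cache key embeds CACHE_NUMBER (bump it to force a fresh cache) plus a hash of
# the recipe's meta.yaml; restore-keys are prefix-matched fallbacks, most specific first,
# so a slightly stale package cache can still be reused when the recipe changes.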
- name: Build conda package
run: conda build --no-test --python ${{ matrix.python }} --numpy 1.23 ${{ env.CHANNELS }} conda-recipe
- name: Upload artifact
uses: actions/upload-artifact@1746f4ab65b179e0ea60a494b83293b640dd5bba # v4.3.2
with:
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
path: ${{ env.CONDA_BLD }}${{ env.PACKAGE_NAME }}-*.tar.bz2
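# Note: the artifact name combines PACKAGE_NAME, the runner OS, and the Python version;
# the test_linux, test_windows, and upload jobs request exactly the same name when
# downloading the built package.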
test_linux:
name: Test ['${{ matrix.os }}', python='${{ matrix.python }}']
needs: build
runs-on: ${{ matrix.os }}
defaults:
run:
shell: bash -l {0}
strategy:
matrix:
python: ['3.9', '3.10', '3.11']
os: [ubuntu-20.04, ubuntu-latest]
continue-on-error: true
env:
conda-pkgs: '/home/runner/conda_pkgs_dir/'
channel-path: '${{ github.workspace }}/channel/'
pkg-path-in-channel: '${{ github.workspace }}/channel/linux-64/'
extracted-pkg-path: '${{ github.workspace }}/pkg/'
tests-path: '${{ github.workspace }}/pkg/info/test/tests/'
ver-json-path: '${{ github.workspace }}/version.json'
steps:
- name: Download artifact
uses: actions/download-artifact@8caf195ad4b1dee92908e23f56eeb0696f1dd42d # v4.1.5
with:
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
path: ${{ env.pkg-path-in-channel }}
- name: Extract package archive
run: |
mkdir -p ${{ env.extracted-pkg-path }}
tar -xvf ${{ env.pkg-path-in-channel }}/${{ env.PACKAGE_NAME }}-*.tar.bz2 -C ${{ env.extracted-pkg-path }}
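# Note: the conda package is a plain tar.bz2 archive; conda-build ships the recipe's test
# files inside it under info/test/, which is where the tests-path variable above points.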
- name: Setup miniconda
uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 # v3.0.3
with:
auto-update-conda: true
python-version: ${{ matrix.python }}
miniconda-version: 'latest'
activate-environment: ${{ env.TEST_ENV_NAME }}
- name: Install conda-index
run: conda install conda-index=${{ env.CONDA_INDEX_VERSION }}
- name: Create conda channel
run: |
python -m conda_index ${{ env.channel-path }}
- name: Test conda channel
run: |
conda search ${{ env.PACKAGE_NAME }} -c ${{ env.channel-path }} --override-channels --info --json > ${{ env.ver-json-path }}
cat ${{ env.ver-json-path }}
- name: Collect dependencies
run: |
export PACKAGE_VERSION=$(python -c "${{ env.VER_SCRIPT1 }} ${{ env.VER_SCRIPT2 }}")
echo PACKAGE_VERSION=${PACKAGE_VERSION}
echo "PACKAGE_VERSION=$PACKAGE_VERSION" >> $GITHUB_ENV
conda install ${{ env.PACKAGE_NAME }}=${PACKAGE_VERSION} python=${{ matrix.python }} ${{ env.TEST_CHANNELS }} --only-deps --dry-run > lockfile
cat lockfile
env:
TEST_CHANNELS: '-c ${{ env.channel-path }} ${{ env.CHANNELS }}'
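# Note: the `--only-deps --dry-run` install resolves the dependency set without installing
# anything; the resulting lockfile is hashed into the cache key below, so the conda package
# cache is invalidated whenever the resolved dependencies change.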
- name: Cache conda packages
uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
env:
CACHE_NUMBER: 1 # Increase to reset cache
with:
path: ${{ env.conda-pkgs }}
key:
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('lockfile') }}
restore-keys: |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-
- name: Install dpnp
run: conda install ${{ env.PACKAGE_NAME }}=${{ env.PACKAGE_VERSION }} pytest python=${{ matrix.python }} ${{ env.TEST_CHANNELS }}
env:
TEST_CHANNELS: '-c ${{ env.channel-path }} ${{ env.CHANNELS }}'
- name: List installed packages
run: conda list
- name: Smoke test
run: |
python -c "import dpnp, dpctl; dpctl.lsplatform()"
python -c "import dpnp; print(dpnp.__version__)"
# TODO: run the whole scope once the issues on CPU are resolved
# - name: Run tests
# run: |
# python -m pytest -q -ra --disable-warnings -vv ${{ env.TEST_SCOPE }}
# working-directory: ${{ env.tests-path }}
# TODO: remove once 2024.2 release is published
- name: Run tests
id: run_tests_linux
uses: nick-fields/retry@7152eba30c6575329ac0576536151aca5a72780e # v3.0.0
with:
shell: bash
timeout_minutes: 10
max_attempts: 5
retry_on: any
command: |
. $CONDA/etc/profile.d/conda.sh
conda activate ${{ env.TEST_ENV_NAME }}
cd ${{ env.tests-path }}
python -m pytest -q -ra --disable-warnings -vv ${{ env.TEST_SCOPE }}
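# Note: nick-fields/retry re-runs the command (up to max_attempts, each with its own
# timeout) in the shell given above rather than the job's default login shell, so the
# command has to source conda.sh and re-activate the test environment itself.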
test_windows:
name: Test ['windows-latest', python='${{ matrix.python }}']
needs: build
runs-on: windows-latest
defaults:
run:
shell: cmd /C CALL {0}
strategy:
matrix:
python: ['3.9', '3.10', '3.11']
continue-on-error: true
env:
conda-pkgs: 'C:\Users\runneradmin\conda_pkgs_dir\'
channel-path: '${{ github.workspace }}\channel\'
pkg-path-in-channel: '${{ github.workspace }}\channel\win-64\'
extracted-pkg-path: '${{ github.workspace }}\pkg'
tests-path: '${{ github.workspace }}\pkg\info\test\tests\'
ver-json-path: '${{ github.workspace }}\version.json'
steps:
- name: Download artifact
uses: actions/download-artifact@8caf195ad4b1dee92908e23f56eeb0696f1dd42d # v4.1.5
with:
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
path: ${{ env.pkg-path-in-channel }}
- name: Extract package archive
run: |
@echo on
mkdir -p ${{ env.extracted-pkg-path }}
set SEARCH_SCRIPT="DIR ${{ env.pkg-path-in-channel }} /s/b | FINDSTR /r "dpnp-.*\.tar\.bz2""
FOR /F "tokens=* USEBACKQ" %%F IN (`%SEARCH_SCRIPT%`) DO (
SET FULL_PACKAGE_PATH=%%F
)
echo FULL_PACKAGE_PATH: %FULL_PACKAGE_PATH%
python -c "import shutil; shutil.unpack_archive(r\"%FULL_PACKAGE_PATH%\", extract_dir=r\"${{ env.extracted-pkg-path }}\")"
dir ${{ env.extracted-pkg-path }}
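# Note: the FOR /F loop captures the output of the DIR | FINDSTR pipeline (the full path
# of the built dpnp tar.bz2) into FULL_PACKAGE_PATH, and Python's shutil.unpack_archive,
# which understands .tar.bz2 archives, extracts it into extracted-pkg-path.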
- name: Setup miniconda
uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 # v3.0.3
with:
auto-update-conda: true
python-version: ${{ matrix.python }}
miniconda-version: 'latest'
activate-environment: ${{ env.TEST_ENV_NAME }}
- name: Store conda paths as envs
run: |
@echo on
(echo CONDA_LIB_PATH=%CONDA_PREFIX%\Library\lib\) >> %GITHUB_ENV%
(echo CONDA_LIB_BIN_PATH=%CONDA_PREFIX%\Library\bin\) >> %GITHUB_ENV%
- name: Install conda-index
run: conda install conda-index=${{ env.CONDA_INDEX_VERSION}}
- name: Create conda channel
run: |
@echo on
python -m conda_index ${{ env.channel-path }}
- name: Test conda channel
run: |
@echo on
conda search ${{ env.PACKAGE_NAME }} -c ${{ env.channel-path }} --override-channels --info --json > ${{ env.ver-json-path }}
- name: Dump version.json
run: more ${{ env.ver-json-path }}
- name: Collect dependencies
run: |
@echo on
set "SCRIPT=${{ env.VER_SCRIPT1 }} ${{ env.VER_SCRIPT2 }}"
FOR /F "tokens=* USEBACKQ" %%F IN (`python -c "%SCRIPT%"`) DO (
SET PACKAGE_VERSION=%%F
)
echo PACKAGE_VERSION: %PACKAGE_VERSION%
(echo PACKAGE_VERSION=%PACKAGE_VERSION%) >> %GITHUB_ENV%
conda install ${{ env.PACKAGE_NAME }}=%PACKAGE_VERSION% python=${{ matrix.python }} ${{ env.TEST_CHANNELS }} --only-deps --dry-run > lockfile
env:
TEST_CHANNELS: '-c ${{ env.channel-path }} ${{ env.CHANNELS }}'
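# Note: cmd has no direct command substitution, so the FOR /F ... USEBACKQ loop runs
# `python -c "%SCRIPT%"` and stores its stdout in PACKAGE_VERSION, which is then written
# to GITHUB_ENV for the install step below.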
- name: Dump lockfile
run: more lockfile
- name: Cache conda packages
uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
env:
CACHE_NUMBER: 1 # Increase to reset cache
with:
path: ${{ env.conda-pkgs }}
key:
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('lockfile') }}
restore-keys: |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-
- name: Install dpnp
run: |
@echo on
conda install ${{ env.PACKAGE_NAME }}=${{ env.PACKAGE_VERSION }} pytest python=${{ matrix.python }} ${{ env.TEST_CHANNELS }}
env:
TEST_CHANNELS: '-c ${{ env.channel-path }} ${{ env.CHANNELS }}'
- name: List installed packages
run: conda list
- name: Activate OCL CPU RT
shell: pwsh
run: |
$script_path="$env:CONDA_PREFIX\Scripts\set-intel-ocl-icd-registry.ps1"
&$script_path
# Check the variable assisting OpenCL CPU driver to find TBB DLLs which are not located where it expects them by default
$cl_cfg="$env:CONDA_PREFIX\Library\lib\cl.cfg"
Get-Content -Tail 5 -Path $cl_cfg
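# Note: set-intel-ocl-icd-registry.ps1 comes from the OpenCL CPU runtime installed in the
# conda environment and, judging by its name, registers the CPU ICD in the Windows
# registry so OpenCL can discover the CPU device; tailing cl.cfg is only a sanity check
# of the driver's TBB DLL path setting.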
- name: Smoke test
run: |
python -c "import dpnp, dpctl; dpctl.lsplatform()"
python -c "import dpnp; print(dpnp.__version__)"
# TODO: run the whole scope once the issues on CPU are resolved
# - name: Run tests
# run: |
# python -m pytest -q -ra --disable-warnings -vv ${{ env.TEST_SCOPE }}
# working-directory: ${{ env.tests-path }}
# TODO: remove once 2024.2 release is published
- name: Run tests
id: run_tests_win
uses: nick-fields/retry@7152eba30c6575329ac0576536151aca5a72780e # v3.0.0
with:
shell: cmd
timeout_minutes: 15
max_attempts: 5
retry_on: any
command: >-
conda activate ${{ env.TEST_ENV_NAME }}
& cd ${{ env.tests-path }}
& python -m pytest -q -ra --disable-warnings -vv ${{ env.TEST_SCOPE }}
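# Note: the `>-` folded scalar joins these lines into a single cmd line, with `&` chaining
# the activate, cd, and pytest commands so they run sequentially in one shell invocation.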
upload:
name: Upload ['${{ matrix.os }}', python='${{ matrix.python }}']
needs: [test_linux, test_windows]
strategy:
matrix:
python: ['3.9', '3.10', '3.11']
os: [ubuntu-20.04, windows-latest]
runs-on: ${{ matrix.os }}
defaults:
run:
shell: ${{ matrix.os == 'windows-latest' && 'cmd /C CALL {0}' || 'bash -l {0}' }}
continue-on-error: true
if: |
(github.repository == 'IntelPython/dpnp') &&
(github.ref == 'refs/heads/master' || (startsWith(github.ref, 'refs/heads/release') == true) || github.event_name == 'push' && contains(github.ref, 'refs/tags/'))
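# Note: this condition restricts uploads to the IntelPython/dpnp repository itself and to
# refs that are the master branch, a release* branch, or a pushed tag, so pull request and
# fork builds are never published.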
steps:
- name: Download artifact
uses: actions/download-artifact@8caf195ad4b1dee92908e23f56eeb0696f1dd42d # v4.1.5
with:
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
- name: Setup miniconda
uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 # v3.0.3
with:
auto-update-conda: true
python-version: ${{ matrix.python }}
miniconda-version: 'latest'
activate-environment: 'upload'
- name: Install anaconda-client
run: conda install anaconda-client
- name: Upload
run: anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2
env:
ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }}
cleanup_packages:
name: Clean up anaconda packages
needs: [upload]
runs-on: 'ubuntu-latest'
defaults:
run:
shell: bash -el {0}
steps:
- uses: conda-incubator/setup-miniconda@030178870c779d9e5e1b4e563269f3aa69b04081 # v3.0.3
with:
run-post: false
channel-priority: "disabled"
channels: conda-forge
python-version: '3.11'
- name: Install anaconda-client
run: conda install anaconda-client
- name: Checkout repo
uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3
with:
repository: IntelPython/devops-tools
fetch-depth: 0
- name: Cleanup old packages
run: |
python scripts/cleanup-old-packages.py \
--verbose --force --token ${{ secrets.ANACONDA_TOKEN }} \
--package dppy/${{ env.PACKAGE_NAME }} --label dev