Alternative Random Ray Volume Estimators (#3060) #176

Workflow file for this run

name: CI

on:
  # allows us to run workflows manually
  workflow_dispatch:
  pull_request:
    branches:
      - develop
      - master
  push:
    branches:
      - develop
      - master

env:
  MPI_DIR: /usr
  HDF5_ROOT: /usr
  OMP_NUM_THREADS: 2
  COVERALLS_PARALLEL: true
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
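# The "main" job builds and tests across a matrix of Python versions and
# optional features (MPI, OpenMP, DAGMC, NCrystal, libMesh, event-based mode,
# vectfit); the `include` list adds extra one-off configurations on top of the
# base matrix. As a hedged example (assuming the GitHub CLI is available), a
# manual run could be triggered via workflow_dispatch with:
#   gh workflow run CI --ref develop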
jobs:
  main:
    runs-on: ubuntu-22.04
    strategy:
      matrix:
        python-version: ["3.10"]
        mpi: [n, y]
        omp: [n, y]
        dagmc: [n]
        ncrystal: [n]
        libmesh: [n]
        event: [n]
        vectfit: [n]
        include:
          - python-version: "3.11"
            omp: n
            mpi: n
          - python-version: "3.12"
            omp: n
            mpi: n
          - dagmc: y
            python-version: "3.10"
            mpi: y
            omp: y
          - ncrystal: y
            python-version: "3.10"
            mpi: n
            omp: n
          - libmesh: y
            python-version: "3.10"
            mpi: y
            omp: y
          - libmesh: y
            python-version: "3.10"
            mpi: n
            omp: y
          - event: y
            python-version: "3.10"
            omp: y
            mpi: n
          - vectfit: y
            python-version: "3.10"
            omp: n
            mpi: y
name: "Python ${{ matrix.python-version }} (omp=${{ matrix.omp }},
mpi=${{ matrix.mpi }}, dagmc=${{ matrix.dagmc }}, ncrystal=${{ matrix.ncrystal }},
libmesh=${{ matrix.libmesh }}, event=${{ matrix.event }}
vectfit=${{ matrix.vectfit }})"
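    # The env block below exports the matrix selections as environment
    # variables; these flags (MPI, OMP, DAGMC, ...) are presumably read by
    # tools/ci/gha-install.sh and gha-script.sh to decide which optional
    # features to build and test.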
    env:
      MPI: ${{ matrix.mpi }}
      PHDF5: ${{ matrix.mpi }}
      OMP: ${{ matrix.omp }}
      DAGMC: ${{ matrix.dagmc }}
      NCRYSTAL: ${{ matrix.ncrystal }}
      EVENT: ${{ matrix.event }}
      VECTFIT: ${{ matrix.vectfit }}
      LIBMESH: ${{ matrix.libmesh }}
      NPY_DISABLE_CPU_FEATURES: "AVX512F AVX512_SKX"
      OPENBLAS_NUM_THREADS: 1
      # libfabric complains about fork() as a result of using Python multiprocessing.
      # We can work around it with RDMAV_FORK_SAFE=1 in libfabric < 1.13 and with
      # FI_EFA_FORK_SAFE=1 in more recent versions.
      RDMAV_FORK_SAFE: 1
    steps:
      - uses: actions/checkout@v4

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      - name: Environment Variables
        run: |
          echo "DAGMC_ROOT=$HOME/DAGMC" >> $GITHUB_ENV
          echo "OPENMC_CROSS_SECTIONS=$HOME/nndc_hdf5/cross_sections.xml" >> $GITHUB_ENV
          echo "OPENMC_ENDF_DATA=$HOME/endf-b-vii.1" >> $GITHUB_ENV
      - name: Apt dependencies
        shell: bash
        run: |
          sudo apt -y update
          sudo apt install -y libpng-dev \
            libnetcdf-dev \
            libpnetcdf-dev \
            libhdf5-serial-dev \
            libeigen3-dev

      - name: Optional apt dependencies for MPI
        shell: bash
        if: ${{ matrix.mpi == 'y' }}
        run: |
          sudo apt install -y libhdf5-mpich-dev \
            libmpich-dev
          sudo update-alternatives --set mpi /usr/bin/mpicc.mpich
          sudo update-alternatives --set mpirun /usr/bin/mpirun.mpich
          sudo update-alternatives --set mpi-x86_64-linux-gnu /usr/include/x86_64-linux-gnu/mpich

      - name: Optional apt dependencies for vectfit
        shell: bash
        if: ${{ matrix.vectfit == 'y' }}
        run: sudo apt install -y libblas-dev liblapack-dev
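      # The install step below puts an NJOY2016 build directory on PATH
      # (presumably needed by tests that generate nuclear data) before running
      # the project's gha-install.sh build/install script.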
      - name: install
        shell: bash
        run: |
          echo "$HOME/NJOY2016/build" >> $GITHUB_PATH
          $GITHUB_WORKSPACE/tools/ci/gha-install.sh

      - name: cache-xs
        uses: actions/cache@v4
        with:
          path: |
            ~/nndc_hdf5
            ~/endf-b-vii.1
          key: ${{ runner.os }}-build-xs-cache
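      # The cross-section (~/nndc_hdf5) and ENDF (~/endf-b-vii.1) data
      # directories are cached keyed on the runner OS, so later runs can
      # restore them instead of re-downloading.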
      - name: before
        shell: bash
        run: $GITHUB_WORKSPACE/tools/ci/gha-before-script.sh

      - name: test
        shell: bash
        run: |
          CTEST_OUTPUT_ON_FAILURE=1 make test -C $GITHUB_WORKSPACE/build/
          $GITHUB_WORKSPACE/tools/ci/gha-script.sh

      - name: after_success
        shell: bash
        run: |
          cpp-coveralls -i src -i include -e src/external --exclude-pattern "/usr/*" --dump cpp_cov.json
          coveralls --merge=cpp_cov.json --service=github
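  # Coveralls parallel-build pattern: each matrix job uploads partial coverage
  # (COVERALLS_PARALLEL: true above), and the finish job below signals that all
  # parallel uploads are done so Coveralls can aggregate the final report.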
  finish:
    needs: main
    runs-on: ubuntu-latest
    steps:
      - name: Coveralls Finished
        uses: coverallsapp/github-action@v2
        with:
          github-token: ${{ secrets.github_token }}
          parallel-finished: true