From 3e7434cb84b436d748a52a2d568a470951ea97c3 Mon Sep 17 00:00:00 2001 From: Goran Jelic-Cizmek Date: Mon, 11 Nov 2024 17:09:13 +0100 Subject: [PATCH] Revert "Remove CI to build faster" This reverts commit bea2ceb60a51c0c83239ae086652c4cdf4d56a68. --- .circleci/config.yml | 106 +++++++ .github/workflows/coverage.yml | 193 +++++++++++++ .github/workflows/docs.yml | 80 ++++++ .github/workflows/external.yml | 71 +++++ .github/workflows/neuron-ci.yml | 475 ++++++++++++++++++++++++++++++++ .github/workflows/release.yml | 91 ++++++ .github/workflows/windows.yml | 104 +++++++ azure-pipelines.yml | 83 ++++++ 8 files changed, 1203 insertions(+) create mode 100644 .circleci/config.yml create mode 100644 .github/workflows/coverage.yml create mode 100644 .github/workflows/docs.yml create mode 100644 .github/workflows/external.yml create mode 100644 .github/workflows/neuron-ci.yml create mode 100644 .github/workflows/release.yml create mode 100644 .github/workflows/windows.yml diff --git a/.circleci/config.yml b/.circleci/config.yml new file mode 100644 index 0000000000..737c5cc777 --- /dev/null +++ b/.circleci/config.yml @@ -0,0 +1,106 @@ +version: 2.1 + +orbs: + python: circleci/python@2.1.1 + +jobs: + manylinux2014-aarch64: + + parameters: + NRN_PYTHON_VERSION: + type: string + NRN_NIGHTLY_UPLOAD: + type: string + + machine: + image: default + + resource_class: arm.medium + + steps: + - checkout + - run: + name: Build manylinux AArch64 wheel + command: | + docker run --rm \ + -w /root/nrn \ + -v $PWD:/root/nrn \ + -v /opt/nrnwheel/mpt:/nrnwheel/mpt \ + -e NEURON_NIGHTLY_TAG \ + -e NRN_NIGHTLY_UPLOAD \ + -e NRN_RELEASE_UPLOAD \ + -e SETUPTOOLS_SCM_PRETEND_VERSION \ + -e NRN_BUILD_FOR_UPLOAD=1 \ + 'neuronsimulator/neuron_wheel:latest-gcc9-aarch64' \ + packaging/python/build_wheels.bash linux << parameters.NRN_PYTHON_VERSION >> coreneuron + + - store_artifacts: + path: ./wheelhouse + destination: artifacts + + - run: + name: Test manylinux AArch64 wheel + command: | + + # 
install mpi dependencies + sudo apt update + sudo apt install -y mpich openmpi-bin libopenmpi-dev libmpich-dev + + # choose available python versions from pyenv + pyenv_py_ver="" + case << parameters.NRN_PYTHON_VERSION >> in + 38) pyenv_py_ver="3.8" ;; + 39) pyenv_py_ver="3.9" ;; + 310) pyenv_py_ver="3.10" ;; + 311) pyenv_py_ver="3.11" ;; + 312) pyenv_py_ver="3.12" ;; + *) echo "Error: pyenv python version not specified or not supported." && exit 1;; + esac + + env PYTHON_CONFIGURE_OPTS="--enable-shared" pyenv install $pyenv_py_ver --force + pyenv global $pyenv_py_ver + export PYTHON_EXE=$(which python) + + # test wheel + packaging/python/test_wheels.sh $PYTHON_EXE $(ls -t wheelhouse/*.whl) + + - run: + name: Upload nightly wheel to pypi.org + command: | + if [ "<< parameters.NRN_NIGHTLY_UPLOAD >>" == "true" ]; then + python -m pip install --upgrade pip + python -m pip install twine + python -m twine upload --verbose --skip-existing -u $TWINE_USERNAME -p $TWINE_PASSWORD wheelhouse/*.whl + else + echo "Skipping pypi.org upload!" 
+ fi + +workflows: + + build-workflow: + jobs: + - manylinux2014-aarch64: + filters: + branches: + only: + - /release\/.*/ + - /circleci\/.*/ + matrix: + parameters: + NRN_PYTHON_VERSION: ["312"] + NRN_NIGHTLY_UPLOAD: ["false"] + + nightly: + triggers: + - schedule: + cron: "0 0 * * *" + filters: + branches: + only: + - master + jobs: + - manylinux2014-aarch64: + matrix: + parameters: + NRN_PYTHON_VERSION: ["38", "39", "310", "311", "312"] + NRN_NIGHTLY_UPLOAD: ["true"] diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml new file mode 100644 index 0000000000..ce0bb21bae --- /dev/null +++ b/.github/workflows/coverage.yml @@ -0,0 +1,193 @@ +name: NEURON Code Coverage + +concurrency: + # Don't cancel on master, creating a PR when a push workflow is already going will cancel the push workflow in favour of the PR workflow + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.event.number && github.head_ref || github.ref_name }} + cancel-in-progress: true + +on: + merge_group: + push: + branches: + - master + - release/** + pull_request: + branches: + - master + - release/** +# TODO : https://github.com/neuronsimulator/nrn/issues/1063 +# paths-ignore: +# - '**.md' +# - '**.rst' +# - 'docs/**' + +env: + PY_MIN_VERSION: '3.8' + PY_MID_VERSION: '3.10' + PY_MAX_VERSION: '3.12' + +jobs: + coverage: + runs-on: ubuntu-22.04 + + name: Code Coverage + + timeout-minutes: 60 + + env: + DISPLAY: ${{ ':0' }} + MUSIC_INSTALL_DIR: /opt/MUSIC + MUSIC_VERSION: 1.2.0 + + steps: + + - name: Install apt packages + run: | + sudo apt-get install xfonts-100dpi build-essential doxygen lcov libboost-all-dev libopenmpi-dev libmpich-dev libx11-dev libxcomposite-dev mpich openmpi-bin gpg ninja-build flex bison libfl-dev + shell: bash + + - name: Setup Caliper profiler + run: | + git clone https://github.com/LLNL/Caliper.git + cd Caliper + mkdir build && cd build + cmake .. 
+ make && sudo make install + + - name: Setup MUSIC@${{ env.MUSIC_VERSION }} + run: | + python3 -m venv music-venv + source music-venv/bin/activate + python3 -m pip install 'mpi4py<4' cython numpy setuptools + sudo mkdir -p $MUSIC_INSTALL_DIR + sudo chown -R $USER $MUSIC_INSTALL_DIR + curl -L -o MUSIC.zip https://github.com/INCF/MUSIC/archive/refs/tags/${MUSIC_VERSION}.zip + unzip MUSIC.zip && mv MUSIC-* MUSIC && cd MUSIC + ./autogen.sh + ./configure --with-python-sys-prefix --prefix=$MUSIC_INSTALL_DIR --disable-anysource + make -j install + deactivate + working-directory: ${{runner.temp}} + + - name: Setup Xvfb + run: | + sudo apt-get install xvfb + sudo /usr/bin/Xvfb $DISPLAY -screen 0 1600x1200x24 -noreset -nolock -shmem & # run in bg + + - uses: actions/checkout@v4 + with: + fetch-depth: 2 + + - name: Clone nmodl + working-directory: ${{runner.workspace}}/nrn + run: | + git submodule update --init --recursive --force --depth 1 -- external/nmodl + + - name: Set up Python@${{ env.PY_MIN_VERSION }} + uses: actions/setup-python@v5 + with: + python-version: ${{ env.PY_MIN_VERSION }} + + - name: Install Python@${{ env.PY_MIN_VERSION }} dependencies + working-directory: ${{runner.workspace}}/nrn + run: | + python -m pip install --upgrade -r external/nmodl/requirements.txt + python -m pip install --upgrade -r ci_requirements.txt + python -m pip install --upgrade pip -r nrn_requirements.txt + + - name: Set up Python@${{ env.PY_MID_VERSION }} + uses: actions/setup-python@v5 + with: + python-version: ${{ env.PY_MID_VERSION }} + + - name: Set up Python@${{ env.PY_MAX_VERSION }} + uses: actions/setup-python@v5 + with: + python-version: ${{ env.PY_MAX_VERSION }} + + - name: Install Python@${{ env.PY_MAX_VERSION }} dependencies + working-directory: ${{runner.workspace}}/nrn + run: | + python -m pip install --upgrade -r external/nmodl/requirements.txt + python -m pip install --upgrade -r ci_requirements.txt + python -m pip install --upgrade pip -r nrn_requirements.txt + + + - 
name: Build & Test + id: build-test + shell: bash + working-directory: ${{runner.workspace}}/nrn + run: | + export SHELL="/bin/bash" + + # Compiler setup + export CC=gcc + export CXX=g++ + + # Python setup + export PYTHON_MIN=$(which $PYTHON_MIN_NAME); + export PYTHON_MID=$(which $PYTHON_MID_NAME); + export PYTHON_MAX=$(which $PYTHON_MAX_NAME); + + mkdir build && cd build; + + # CMake options & flags + cmake_args=(-G Ninja \ + -DCMAKE_BUILD_TYPE=Debug \ + -DCMAKE_C_COMPILER="$CC" \ + -DCMAKE_CXX_COMPILER="$CXX" \ + -DNRN_ENABLE_BACKTRACE=ON \ + -DNRN_ENABLE_CORENEURON=ON \ + -DNRN_ENABLE_COVERAGE=ON \ + -DNRN_ENABLE_INTERVIEWS=ON \ + -DNRN_ENABLE_MPI=ON \ + -DNRN_ENABLE_PERFORMANCE_TESTS=OFF \ + -DNRN_ENABLE_PROFILING=ON \ + -DNRN_ENABLE_PYTHON=ON \ + -DNRN_ENABLE_PYTHON_DYNAMIC=ON \ + -DNRN_PYTHON_DYNAMIC="${PYTHON_MIN};${PYTHON_MAX}" \ + -DNRN_PYTHON_EXTRA_FOR_TESTS=${PYTHON_MID} \ + -DNRN_ENABLE_TESTS=ON \ + -DNRN_ENABLE_MUSIC=ON \ + -DCMAKE_PREFIX_PATH="${MUSIC_INSTALL_DIR}" \ + -DMUSIC_ROOT="${MUSIC_INSTALL_DIR}") + cmake .. "${cmake_args[@]}" + # Coverage + # The Linux runners apparently have 2 cores, but jobs were being killed when we did not specify this explicitly. + # https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners#supported-runners-and-hardware-resources + # By default we get a modern version of CMake that understands --parallel. + cmake --build . --parallel 2 + cmake --build . --target cover_baseline + xvfb-run ctest --rerun-failed --output-on-failure; + for python in "${PYTHON_MIN}" "${PYTHON_MAX}" + do + echo "Using ${python}" + NEURONHOME="${PWD}/share/nrn" \ + PYTHONPATH="${PWD}/lib/python:${PYTHONPATH}" \ + PATH="${PWD}/bin:${PATH}" \ + LD_LIBRARY_PATH="${PWD}/lib:${LD_LIBRARY_PATH}" \ + DYLD_LIBRARY_PATH="${PWD}/lib:${DYLD_LIBRARY_PATH}" \ + "${python}" -c "from neuron import h; import neuron; neuron.test();neuron.test_rxd();" + done + cmake --build . --target cover_collect + cmake --build . 
--target cover_combine + env: + MATRIX_EVAL: "CC=gcc CXX=g++" + PYTHON_MIN_NAME: "python${{ env.PY_MIN_VERSION }}" + PYTHON_MID_NAME: "python${{ env.PY_MID_VERSION }}" + PYTHON_MAX_NAME: "python${{ env.PY_MAX_VERSION }}" + + # This step will set up an SSH connection on tmate.io for live debugging. + # To enable it, you have to: + # * add 'live-debug-coverage' to your PR title + # * push something to your PR branch (note that just re-running the pipeline disregards the title update) + - name: live debug session on failure (manual steps required, check `.github/coverage.yml`) + if: failure() && contains(github.event.pull_request.title, 'live-debug-coverage') + uses: mxschmitt/action-tmate@v3 + + - uses: codecov/codecov-action@v4 + with: + directory: ./build + fail_ci_if_error: true + verbose: true + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 0000000000..2ef1052dd7 --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,80 @@ +name: NEURON Documentation + +concurrency: + # Don't cancel on master, creating a PR when a push workflow is already going will cancel the push workflow in favour of the PR workflow + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.event.number && github.head_ref || github.ref_name }} + cancel-in-progress: true + +on: + merge_group: + push: + branches: + - master + - release/** + pull_request: + branches: + - master + - release/** + +env: + DEFAULT_PY_VERSION: '3.12' + +jobs: + documentation: + runs-on: ubuntu-20.04 + + name: Documentation + + timeout-minutes: 25 + + steps: + + - name: Install apt packages + run: | + sudo apt-get update --fix-missing + sudo apt-get install build-essential libopenmpi-dev libmpich-dev libx11-dev libxcomposite-dev mpich openmpi-bin + sudo apt-get install ffmpeg doxygen pandoc + shell: bash + + - name: Set up Python@${{ env.DEFAULT_PY_VERSION }} + uses: actions/setup-python@v5 + 
with: + python-version: ${{ env.DEFAULT_PY_VERSION }} + + - uses: actions/checkout@v4 + + - name: Install Python dependencies + working-directory: ${{runner.workspace}}/nrn + run: | + python -m pip install --upgrade pip -r nrn_requirements.txt + + - name: Install Python documentation dependencies + working-directory: ${{runner.workspace}}/nrn + run: | + python -m pip install --upgrade -r docs/docs_requirements.txt + + - name: Documentation + id: documentation + shell: bash + working-directory: ${{runner.workspace}}/nrn + run: | + echo "-------- NEURON wheel --------"; + python setup.py build_ext bdist_wheel; + neuron_wheel=dist/NEURON*.whl; + echo "-------- install wheel --------" + python -m pip install $neuron_wheel; + echo "-------- now build docs--------"; + python setup.py docs; + echo "-------- disable jekyll--------"; + pushd docs/_build; + touch .nojekyll; + echo "-------- OK to deploy! --------"; + echo "OK_TO_DEPLOY_DOCS=true" >> $GITHUB_ENV + + # This step will set up an SSH connection on tmate.io for live debugging. 
+  # To enable it, you have to:
+  #   * add 'live-debug-docs' to your PR title
+  #   * push something to your PR branch (note that just re-running the pipeline disregards the title update)
+      - name: live debug session on failure (manual steps required, check `.github/docs.yml`)
+        if: failure() && contains(github.event.pull_request.title, 'live-debug-docs')
+        uses: mxschmitt/action-tmate@v3
diff --git a/.github/workflows/external.yml b/.github/workflows/external.yml
new file mode 100644
index 0000000000..ab06e55459
--- /dev/null
+++ b/.github/workflows/external.yml
@@ -0,0 +1,71 @@
+name: External CIs
+
+concurrency:
+  # Don't cancel on master, creating a PR when a push workflow is already going will cancel the push workflow in favour of the PR workflow
+  group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.event.number && github.head_ref || github.ref_name }}
+  cancel-in-progress: true
+
+on:
+  pull_request:
+    types: [ labeled ]
+env:
+  PR_URL: ${{ github.event.pull_request.html_url }}
+  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+jobs:
+  get-last-azure-url:
+    runs-on: ubuntu-latest
+    if: ${{ github.event.label.name == 'nrn-modeldb-ci-nightly' }}
+    outputs:
+      azure_drop_url: ${{ steps.drop.outputs.azure_drop_url }}
+      pr_azure_sha: ${{ steps.drop.outputs.pr_azure_sha }}
+    steps:
+      - id: drop
+        run: |
+          # use jq to get the last Azure drop URL from the PR and the SHA1 from the same body
+          export pr_json=$(gh pr view $PR_URL --json comments -q 'last(.comments[] .body | capture(".*(?<pr_azure_sha>[0-9a-f]{40}).*?(?<azure_drop_url>https://dev.azure.com/neuronsimulator/.*=zip)"))')
+          if [ -z "$pr_json" ]; then
+            echo "Unable to retrieve AZURE drop url and SHA from comments!"
+ exit 1 + fi + # use jq to get pr_azure_sha and azure_drop_url from the json + export azure_drop_url=$(echo $pr_json | jq -r .azure_drop_url) + export pr_azure_sha=$(echo $pr_json | jq -r .pr_azure_sha) + echo azure_drop_url=$azure_drop_url >> $GITHUB_OUTPUT + echo pr_azure_sha=$pr_azure_sha >> $GITHUB_OUTPUT + + - id: remove-label + if: always() + run: | + # remove the label + gh pr edit $PR_URL --remove-label nrn-modeldb-ci-nightly + # if we encounter an error in last github action step, add a comment + if [ ${{ steps.drop.outcome }} == 'failure' ]; then + gh pr comment $PR_URL --body "Unable to retrieve AZURE drop url from comments!" + else + gh pr comment $PR_URL --body "NEURON ModelDB CI: launching for ${pr_azure_sha} via its [drop url]($azure_drop_url)" + fi + env: + pr_azure_sha: ${{ steps.drop.outputs.pr_azure_sha }} + azure_drop_url: ${{ steps.drop.outputs.azure_drop_url }} + + nrn-modeldb-ci: + needs: get-last-azure-url + uses: neuronsimulator/nrn-modeldb-ci/.github/workflows/nrn-modeldb-ci.yaml@master + with: + neuron_v1: ${{needs.get-last-azure-url.outputs.azure_drop_url}} + neuron_v2: neuron-nightly + + pr-update: + needs: + - nrn-modeldb-ci + - get-last-azure-url + runs-on: ubuntu-latest + steps: + - run: | + gh pr comment $PR_URL --body "NEURON ModelDB CI: ${pr_azure_sha} -> download reports [from here](${ARTIFACTS_URL})" + name: Post NEURON ModelDB CI Artifact URL + if: always() || cancelled() + env: + ARTIFACTS_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + pr_azure_sha: ${{ needs.get-last-azure-url.outputs.pr_azure_sha }} diff --git a/.github/workflows/neuron-ci.yml b/.github/workflows/neuron-ci.yml new file mode 100644 index 0000000000..0c82d98836 --- /dev/null +++ b/.github/workflows/neuron-ci.yml @@ -0,0 +1,475 @@ +name: NEURON CI + +concurrency: + # Don't cancel on master, creating a PR when a push workflow is already going will cancel the push workflow in favour of the PR workflow + group: ${{ 
github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.event.number && github.head_ref || github.ref_name }} + cancel-in-progress: true + +on: + merge_group: + push: + branches: + # If nothing else, this is important for the ccache logic below... + - master + - release/** + pull_request: + branches: + - master + - release/** +# TODO : https://github.com/neuronsimulator/nrn/issues/1063 +# paths-ignore: +# - '**.md' +# - '**.rst' +# - 'docs/**' + + +jobs: + ci: + runs-on: ${{ matrix.os }} + + name: ${{ matrix.os }} - ${{ matrix.config.build_mode }} (${{ matrix.config.cmake_option }}${{ matrix.config.config_options }}${{ matrix.config.matrix_eval }}${{ matrix.config.sanitizer }}) + + timeout-minutes: 75 + + env: + INSTALL_DIR: install + SDK_ROOT: $(xcrun --sdk macosx --show-sdk-path) + SKIP_WHEELHOUSE_REPAIR: true + BUILD_TYPE: Release + DESIRED_CMAKE_VERSION: 3.17 + DYNAMIC_PYTHON_CMAKE_VERSION: 3.18 + PY_MIN_VERSION: ${{ matrix.config.python_min_version || '3.8' }} + PY_MAX_VERSION: ${{ matrix.config.python_max_version || '3.12' }} + MUSIC_INSTALL_DIR: /opt/MUSIC + MUSIC_VERSION: 1.2.1 + + strategy: + matrix: + os: [macOS-13, ubuntu-20.04] + config: + - { matrix_eval : "CC=gcc-9 CXX=g++-9", build_mode: "setuptools"} + - { matrix_eval : "CC=gcc-10 CXX=g++-10", build_mode: "cmake", music: ON} + - { matrix_eval : "CC=gcc-10 CXX=g++-10", build_mode: "cmake", python_dynamic: ON} + - { matrix_eval : "CC=gcc-9 CXX=g++-9" , build_mode: "cmake", cmake_option: "-DNRN_ENABLE_CORENEURON=ON"} + - { matrix_eval : "CC=gcc-9 CXX=g++-9", build_mode: "cmake", cmake_option: "-DNRN_ENABLE_MPI=OFF -DNRN_ENABLE_INTERVIEWS=OFF -DNRN_ENABLE_CORENEURON=ON"} + - { matrix_eval : "CC=gcc-10 CXX=g++-10", build_mode: "cmake", cmake_option: "-DNRN_ENABLE_PYTHON=OFF -DNRN_ENABLE_RX3D=OFF -DNRN_ENABLE_CORENEURON=ON"} + include: + - os: ubuntu-22.04 + config: + build_mode: cmake + cmake_option: -DNRN_ENABLE_CORENEURON=ON + -DNRN_ENABLE_INTERVIEWS=OFF 
-DNMODL_SANITIZERS=undefined + flag_warnings: ON + sanitizer: undefined + - os: ubuntu-22.04 + config: + build_mode: cmake + # TODO: CoreNEURON is only LeakSanitizer-clean if we disable MPI + cmake_option: -DNRN_ENABLE_CORENEURON=ON + -DNRN_ENABLE_INTERVIEWS=OFF -DNMODL_SANITIZERS=address + # TODO: address-leak is the dream, but there are many problems, + # including external ones from the MPI implementations + sanitizer: address + - os: ubuntu-24.04 + config: + build_mode: cmake + # Cannot use a non-instrumented OpenMP with TSan, and we don't + # have a TSan-instrumented OpenMP runtime available. + # TODO: debug RX3D + TSan + cmake_option: -DNRN_ENABLE_CORENEURON=ON -DNRN_ENABLE_MPI=OFF + -DCORENRN_ENABLE_OPENMP=OFF -DNRN_ENABLE_RX3D=OFF + sanitizer: thread + - os: macOS-13 + config: + build_mode: cmake + # TODO: investigate rxd test timeouts in this build and re-enable them + cmake_option: -DNRN_ENABLE_CORENEURON=ON -DNRN_ENABLE_INTERVIEWS=OFF + -DNRN_ENABLE_RX3D=OFF -DNMODL_SANITIZERS=address + sanitizer: address + - os: macOS-14 + config: + build_mode: cmake + # TODO: investigate rxd test timeouts in this build and re-enable them + cmake_option: -DNRN_ENABLE_CORENEURON=ON -DNRN_ENABLE_INTERVIEWS=OFF + -DNRN_ENABLE_RX3D=OFF -DNMODL_SANITIZERS=address + sanitizer: thread + fail-fast: false + + steps: + + - name: Fix kernel mmap rnd bits + # Asan in llvm 14 provided in ubuntu 22.04 is incompatible with + # high-entropy ASLR in much newer kernels that GitHub runners are + # using leading to random crashes: https://reviews.llvm.org/D148280 + run: sudo sysctl vm.mmap_rnd_bits=28 + if: matrix.os == 'ubuntu-22.04' + + - name: Setup cmake + uses: jwlawson/actions-setup-cmake@v2 + with: + cmake-version : ${{(matrix.config.python_dynamic || matrix.config.build_mode == 'setuptools') && env.DYNAMIC_PYTHON_CMAKE_VERSION || env.DESIRED_CMAKE_VERSION}} + + - name: Install homebrew packages + if: startsWith(matrix.os, 'macOS') + run: | + # Unlink and re-link to prevent errors 
when GitHub macOS runner images + # install Python outside of brew; See actions/setup-python#577 and BlueBrain/libsonata/pull/317 + brew list -1 | grep python | while read formula; do brew unlink $formula; brew link --overwrite $formula; done + brew install ccache coreutils doxygen flex bison mpich ninja xz autoconf automake libtool + # We use both for dynamic mpi in nrn + brew unlink mpich + brew install openmpi + brew install --cask xquartz + if [[ "${{matrix.os}}" == "macOS-14" ]]; then + brew install cmake + echo "$(brew --prefix)/opt/cmake/bin" >> $GITHUB_PATH + fi + echo "$(brew --prefix)/opt/flex/bin:$(brew --prefix)/opt/bison/bin" >> $GITHUB_PATH + # Core https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories + if [[ "${{matrix.os}}" == "macOS-13" ]]; then + echo CMAKE_BUILD_PARALLEL_LEVEL=4 >> $GITHUB_ENV + echo CTEST_PARALLEL_LEVEL=4 >> $GITHUB_ENV + else + echo CMAKE_BUILD_PARALLEL_LEVEL=3 >> $GITHUB_ENV + echo CTEST_PARALLEL_LEVEL=3 >> $GITHUB_ENV + fi + echo CI_OS_NAME=osx >> $GITHUB_ENV + shell: bash + + - name: Install apt packages + if: startsWith(matrix.os, 'ubuntu') + run: | + sudo apt-get install build-essential ccache libopenmpi-dev \ + libmpich-dev libx11-dev libxcomposite-dev mpich ninja-build \ + openmpi-bin flex libfl-dev bison libreadline-dev + # The sanitizer builds use ubuntu 22.04 + if [[ "${{matrix.os}}" == "ubuntu-20.04" ]]; then + sudo apt-get install g++-7 g++-8 + fi + # Core https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories + echo CMAKE_BUILD_PARALLEL_LEVEL=4 >> $GITHUB_ENV + echo CTEST_PARALLEL_LEVEL=4 >> $GITHUB_ENV + echo CI_OS_NAME=linux >> $GITHUB_ENV + shell: bash + + - uses: actions/checkout@v4 + with: + fetch-depth: 2 + + - name: Clone nmodl + working-directory: 
${{runner.workspace}}/nrn + run: | + git submodule update --init --recursive --force --depth 1 -- external/nmodl + + - name: Set up Python@${{ env.PY_MIN_VERSION }} + if: ${{matrix.config.python_dynamic == 'ON'}} + uses: actions/setup-python@v5 + with: + python-version: ${{ env.PY_MIN_VERSION }} + + - name: Install Python@${{ env.PY_MIN_VERSION }} dependencies + if: ${{ matrix.config.python_dynamic == 'ON' }} + working-directory: ${{runner.workspace}}/nrn + run: | + python -m pip install --upgrade -r external/nmodl/requirements.txt + python -m pip install --upgrade -r ci_requirements.txt + python -m pip install --upgrade pip -r nrn_requirements.txt + + - name: Set up Python@${{ env.PY_MAX_VERSION }} + uses: actions/setup-python@v5 + with: + python-version: ${{ env.PY_MAX_VERSION }} + + - name: Install Python@${{ env.PY_MAX_VERSION }} dependencies + working-directory: ${{runner.workspace}}/nrn + run: | + python -m pip install --upgrade -r external/nmodl/requirements.txt + python -m pip install --upgrade -r ci_requirements.txt + python -m pip install --upgrade pip -r nrn_requirements.txt + + - name: Install a new automake + # A automake >= 1.16.5 is needed for python 3.12 because it generates a python script + # called py-compile and the original one is not supporting this version of python + # Once ubuntu got a newer version of automake we can remove this part. 
+ if: matrix.config.music == 'ON' && startsWith(matrix.os, 'ubuntu') + run: | + curl -L -o automake.tar.xz https://ftpmirror.gnu.org/gnu/automake/automake-1.16.5.tar.xz + tar -xf automake.tar.xz + cd automake-1.16.5/ + ./configure --prefix=/usr/ + make -j + sudo make -j install + automake --version + working-directory: ${{runner.temp}} + + - name: Setup MUSIC@${{ env.MUSIC_VERSION }} + if: matrix.config.music == 'ON' + run: | + python3 -m venv music-venv + source music-venv/bin/activate + python3 -m pip install 'mpi4py<4' cython numpy setuptools + sudo mkdir -p $MUSIC_INSTALL_DIR + sudo chown -R $USER $MUSIC_INSTALL_DIR + curl -L -o MUSIC.zip https://github.com/INCF/MUSIC/archive/refs/tags/${MUSIC_VERSION}.zip + unzip MUSIC.zip && mv MUSIC-* MUSIC && cd MUSIC + ./autogen.sh + # on some systems MPI library detection fails, provide exact flags/compilers + ./configure --with-python-sys-prefix --prefix=$MUSIC_INSTALL_DIR --disable-anysource MPI_CXXFLAGS="-g -O3" MPI_CFLAGS="-g -O3" MPI_LDFLAGS=" " CC=mpicc CXX=mpicxx + make -j install + deactivate + working-directory: ${{runner.temp}} + + - name: Register gcc problem matcher + if: ${{matrix.config.flag_warnings == 'ON'}} + run: echo "::add-matcher::.github/problem-matchers/gcc.json" + + - name: Register sanitizer problem matcher + if: ${{matrix.config.sanitizer}} + run: echo "::add-matcher::.github/problem-matchers/${{matrix.config.sanitizer}}.json" + + - name: Hash config dictionary + run: | + cat << EOF > matrix.json + ${{toJSON(matrix.config)}} + EOF + echo matrix.config JSON: + cat matrix.json + echo ----- + + - name: Restore compiler cache + uses: actions/cache/restore@v4 + id: restore-compiler-cache + with: + path: ${{runner.workspace}}/ccache + key: ${{matrix.os}}-${{hashfiles('matrix.json')}}-${{github.ref}}-${{github.sha}} + restore-keys: | + ${{matrix.os}}-${{hashfiles('matrix.json')}}-${{github.ref}}- + ${{matrix.os}}-${{hashfiles('matrix.json')}}- + + - name: Build and Test + id: build-test + shell: bash + 
working-directory: ${{runner.workspace}}/nrn + run: | + # OS related + if [ "$RUNNER_OS" == "Linux" ]; then + export ${MATRIX_EVAL}; + export SHELL="/bin/bash" + else + export CXX=${CXX:-g++}; + export CC=${CC:-gcc}; + fi + if [ "$RUNNER_OS" == "macOS" ]; then + # TODO - this is a workaround that was implemented for Azure being reported as getting stuck. + # However it does not get stuck: neuron module not found and script goes to interpreter, seeming stuck. + # This needs to be addressed and SKIP_EMBEDED_PYTHON_TEST logic removed everywhere. + export SKIP_EMBEDED_PYTHON_TEST="true" + # long TMPDIR path on MacOS can results into runtime failures with OpenMPI + # Set shorter path as discussed in https://github.com/open-mpi/ompi/issues/8510 + export TMPDIR=/tmp/$GITHUB_JOB + mkdir -p $TMPDIR + fi + + # Python setup + export PYTHONPATH=$PYTHONPATH:$INSTALL_DIR/lib/python/ + # Python setup + export PYTHON_MIN=$(command -v $PYTHON_MIN_NAME); + export PYTHON_MAX=$(command -v $PYTHON_MAX_NAME); + export PYTHON=$PYTHON_MAX + if [ "$RUNNER_OS" == "macOS" ]; then + # Python is not installed as a framework, so we need to writ 'backend: TkAgg' to `matplotlibrc`. + # Since we are in a virtual environment, we cannot use `$HOME/matplotlibrc` + # The following solution is generic and relies on `matplotlib.__file__` to know where to append backend setup. 
+ $PYTHON -c "import os,matplotlib; f =open(os.path.join(os.path.dirname(matplotlib.__file__), 'mpl-data/matplotlibrc'),'a'); f.write('backend: TkAgg');f.close();" + fi; + + # Some logging + echo $LANG + echo $LC_ALL + python3 -c 'import os,sys; os.set_blocking(sys.stdout.fileno(), True)' + cmake --version + + # different builds with CMake + if [[ "$BUILD_MODE" == "cmake" ]]; then + cmake_args=(-G Ninja) + # Sanitizer-specific setup + if [[ -n "${{matrix.config.sanitizer}}" ]]; then + if [ "$RUNNER_OS" == "Linux" ]; then + if [[ "${{matrix.config.sanitizer}}" == "thread" ]]; then + # GitHub/ubuntu-22.04 + clang-14 seems to have problems with TSan. + # Vanilla 22.04 + clang-16 from apt.llvm.org seemed to work. + # Use gcc-12 instead, as GitHub/ubuntu-22.04 already has it. + CC=$(command -v gcc-12) + CXX=$(command -v g++-12) + else + CC=$(command -v clang-14) + CXX=$(command -v clang++-14) + symbolizer_path="$(readlink -f "$(command -v llvm-symbolizer-14)")" + cmake_args+=(-DLLVM_SYMBOLIZER_PATH="${symbolizer_path}") + fi + fi + cmake_args+=(-DCMAKE_BUILD_TYPE=Custom \ + -DCMAKE_C_FLAGS="-O1 -g" \ + -DCMAKE_CXX_FLAGS="-O1 -g" \ + -DNRN_SANITIZERS=$(echo ${{matrix.config.sanitizer}} | sed -e 's/-/,/g')) + fi + cmake_args+=(-DCMAKE_C_COMPILER="${CC}" \ + -DCMAKE_C_COMPILER_LAUNCHER=ccache \ + -DCMAKE_CXX_COMPILER="${CXX}" \ + -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \ + -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" \ + -DNRN_ENABLE_TESTS=ON \ + -DNRN_ENABLE_PERFORMANCE_TESTS=OFF \ + ${{matrix.config.cmake_option}}) + if [[ "$NRN_ENABLE_PYTHON_DYNAMIC" == "ON" ]]; then + cmake_args+=(-DNRN_ENABLE_PYTHON=ON \ + -DNRN_ENABLE_PYTHON_DYNAMIC=ON \ + -DNRN_PYTHON_DYNAMIC="${PYTHON_MIN};${PYTHON_MAX}" \ + -DNRN_ENABLE_CORENEURON=ON) + else + cmake_args+=(-DPYTHON_EXECUTABLE="${PYTHON}") + fi + if [[ "$NRN_ENABLE_MUSIC" == "ON" ]]; then + cmake_args+=(-DNRN_ENABLE_MUSIC=ON \ + -DCMAKE_PREFIX_PATH=${MUSIC_INSTALL_DIR} \ + -DMUSIC_ROOT=${MUSIC_INSTALL_DIR}) + fi + # Enable more warnings in 
the builds whose compiler warnings we + # highlight in the GitHub UI + if [[ "${{matrix.config.flag_warnings}}" == "ON" ]]; then + cmake_args+=(-DNRN_EXTRA_CXX_FLAGS="-Wall \ + -Wno-char-subscripts \ + -Wno-unknown-pragmas \ + -Wno-unused-variable \ + -Wno-unused-function \ + -Wno-unused-but-set-variable \ + -Wno-reorder \ + -Wno-sign-compare" \ + -DNRN_EXTRA_MECH_CXX_FLAGS="-Wno-sometimes-uninitialized \ + -Wno-missing-braces") + fi + mkdir build && cd build + echo "Building with: ${cmake_args[@]}" + cmake .. "${cmake_args[@]}" + if ccache --version | grep -E '^ccache version 4\.(4|4\.1)$' + then + echo "------- Disable ccache direct mode -------" + # https://github.com/ccache/ccache/issues/935 + export CCACHE_NODIRECT=1 + fi + ccache -z + # Older versions don't support -v (verbose) + ccache -vs 2>/dev/null || ccache -s + cmake --build . --parallel + ccache -vs 2>/dev/null || ccache -s + if [ "$RUNNER_OS" == "macOS" ] + then + mkdir -p src/nrnpython + echo $'[install]\nprefix='>src/nrnpython/setup.cfg; + fi + if [[ "$NRN_ENABLE_PYTHON_DYNAMIC" == "ON" ]]; then + echo "--RUNNING BASIC TESTS FROM BUILD DIR--" + for python in "${PYTHON_MIN}" "${PYTHON_MAX}" + do + echo "Using ${python}" + NEURONHOME="${PWD}/share/nrn" \ + PYTHONPATH="${PWD}/lib/python" \ + PATH="${PWD}/bin" \ + LD_LIBRARY_PATH="${PWD}/lib:${LD_LIBRARY_PATH}" \ + DYLD_LIBRARY_PATH="${PWD}/lib:${DYLD_LIBRARY_PATH}" \ + "${python}" -c "from neuron import h; import neuron; neuron.test()" + done + fi + ctest --output-on-failure + cmake --build . 
--target install + export PATH="${INSTALL_DIR}/bin:${PATH}" + if [[ -f "${INSTALL_DIR}/bin/nrn-enable-sanitizer" ]]; then + echo --- bin/nrn-enable-sanitizer --- + cat bin/nrn-enable-sanitizer + echo --- + nrn_enable_sanitizer=${INSTALL_DIR}/bin/nrn-enable-sanitizer + nrn_enable_sanitizer_preload_python="${nrn_enable_sanitizer} --preload python" + else + echo nrn-enable-sanitizer not found, not using it + fi + elif [[ "$BUILD_MODE" == "setuptools" ]]; then + ./packaging/python/build_wheels.bash CI; + fi; + if [[ -z "${nrn_enable_sanitizer_preload_python}" ]]; then + nrn_enable_sanitizer_preload_python="${PYTHON}" + fi + + # basic test for cmake when python is not disabled + if [[ "$BUILD_MODE" == "cmake" && ! "${cmake_args[*]}" =~ "NRN_ENABLE_PYTHON=OFF" ]]; then + ${nrn_enable_sanitizer_preload_python} --version && ${nrn_enable_sanitizer_preload_python} -c 'import neuron; neuron.test()' + fi; + + # test neurondemo with cmake + if [[ "$BUILD_MODE" != "setuptools" ]]; then + ${nrn_enable_sanitizer} neurondemo -nogui -c 'demo(4)' -c 'run()' -c 'quit()' + fi; + + # with cmake dynamic check python_min and python_max together + if [[ "$BUILD_MODE" == "cmake" && "$NRN_ENABLE_PYTHON_DYNAMIC" == "ON" ]]; then + ${nrn_enable_sanitizer_preload_python} -c 'import neuron; neuron.test()' + $PYTHON_MIN -c 'import neuron; neuron.test()' + fi; + + # run rxd tests manually if rxd is enabled *and CoreNEURON is + # disabled -- otherwise hh-related tests fail + if [[ "$BUILD_MODE" == "cmake" \ + && ! "${cmake_args[*]}" =~ "NRN_ENABLE_RX3D=OFF" \ + && ! 
"${cmake_args[*]}" =~ "NRN_ENABLE_CORENEURON=ON" ]]; then + ${nrn_enable_sanitizer_preload_python} ../share/lib/python/neuron/rxdtests/run_all.py + fi; + + if [ "$BUILD_MODE" == "setuptools" ]; then + neuron_wheel=wheelhouse/NEURON*.whl; + # test with virtual environment + ./packaging/python/test_wheels.sh $PYTHON $neuron_wheel + # test with global installation + ./packaging/python/test_wheels.sh $PYTHON $neuron_wheel false + fi; + env: + BUILD_MODE: ${{ matrix.config.build_mode }} + CCACHE_BASEDIR: ${{runner.workspace}}/nrn + CCACHE_DIR: ${{runner.workspace}}/ccache + NRN_ENABLE_PYTHON_DYNAMIC : ${{ matrix.config.python_dynamic }} + NRN_ENABLE_MUSIC: ${{ matrix.config.music }} + PYTHON_MIN_NAME: "python${{ env.PY_MIN_VERSION }}" + PYTHON_MAX_NAME: "python${{ env.PY_MAX_VERSION }}" + INSTALL_DIR : ${{ runner.workspace }}/install + MATRIX_EVAL: ${{ matrix.config.matrix_eval }} + + - name: Save compiler cache + uses: actions/cache/save@v4 + if: always() && steps.restore-compiler-cache.outputs.cache-hit != 'true' + with: + path: ${{runner.workspace}}/ccache + key: | + ${{matrix.os}}-${{hashfiles('matrix.json')}}-${{github.ref}}- + ${{matrix.os}}-${{hashfiles('matrix.json')}}- + + # This step will set up an SSH connection on tmate.io for live debugging. 
+ # To enable it, you have to: + # * add 'live-debug-ci' to your PR title + # * push something to your PR branch (note that just re-running the pipeline disregards the title update) + - name: live debug session on failure (manual steps required, check `.github/neuron-ci.yml`) + if: failure() && contains(github.event.pull_request.title, 'live-debug-ci') + uses: mxschmitt/action-tmate@v3 + + # see https://github.com/orgs/community/discussions/26822 + final: + name: Final CI + needs: [ci] + if: ${{ always() }} + runs-on: ubuntu-latest + steps: + - name: Check ci matrix all done + if: >- + ${{ + contains(needs.*.result, 'failure') + || contains(needs.*.result, 'cancelled') + || contains(needs.*.result, 'skipped') + }} + run: exit 1 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000000..3ba15345b3 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,91 @@ +name: NEURON Release + +on: + workflow_dispatch: + inputs: + rel_branch: + description: 'Release branch/commit' + default: 'release/x.y' + required: true + rel_tag: + description: 'Release version (tag name)' + default: 'x.y.z' + required: true + +env: + GH_REPO: ${{ github.server_url }}/${{ github.repository }} + REL_TAG: ${{ github.event.inputs.rel_tag }} + REL_BRANCH: ${{ github.event.inputs.rel_branch }} + +jobs: + tag-n-release: + runs-on: ubuntu-latest + name: tag-n-release ${{ github.event.inputs.rel_tag }} (${{ github.event.inputs.rel_branch }}) + outputs: + release_url: ${{ steps.create_release.outputs.upload_url }} + rel_tag: ${{ env.REL_TAG }} + steps: + - uses: actions/checkout@v4 + name: Checkout branch ${{ env.REL_BRANCH }} + with: + ref: ${{ env.REL_BRANCH }} + + - name: Create and upload tag ${{ env.REL_TAG }} + run: | + git config user.name github-actions + git config user.email github-actions@github.com + git tag -a $REL_TAG -m "${REL_TAG}" + git push origin $REL_TAG + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + 
working-directory: ${{runner.workspace}}/nrn + + - name: Create Release + id: create_release + uses: ncipollo/release-action@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + tag: ${{ env.REL_TAG }} + name: Release ${{ env.REL_TAG }} + prerelease: true + + nrn-full-src-package: + runs-on: ubuntu-latest + needs: tag-n-release + steps: + - name: Checkout feature-rich code + run: | + git clone --depth=1 --shallow-submodules --recurse-submodules $GH_REPO -b $REL_TAG --single-branch + cd nrn + LOCAL_TAG=`git tag` + if [ $REL_TAG != $LOCAL_TAG ]; then + echo "Wrong tag downloaded!" + exit 1 + else + git log --oneline + fi + + - name: Make nrnversion.h + run: | + mkdir build && cd build + cmake -DNRN_ENABLE_PYTHON=OFF -DNRN_ENABLE_RX3D=OFF -DNRN_ENABLE_MPI=OFF -DNRN_ENABLE_INTERVIEWS=OFF ../nrn + make nrnversion_h VERBOSE=1 + + - name: Create nrn-full-src-package + id: tar + run: | + tar -czvf nrn-full-src-package-${REL_TAG}.tar.gz nrn + echo "asset_file=nrn-full-src-package-${REL_TAG}.tar.gz" >> $GITHUB_OUTPUT + + - name: Upload nrn-full-src-package to release + run: | + gh release upload ${{ needs.tag-n-release.outputs.rel_tag }} ${{ steps.tar.outputs.asset_file }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + windows-installer: + needs: tag-n-release + uses: neuronsimulator/nrn/.github/workflows/windows.yml@master + with: + tag: ${{ needs.tag-n-release.outputs.rel_tag }} diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml new file mode 100644 index 0000000000..985eae0ae0 --- /dev/null +++ b/.github/workflows/windows.yml @@ -0,0 +1,104 @@ +name: Windows Installer + +concurrency: + # Don't cancel on master, creating a PR when a push workflow is already going will cancel the push workflow in favour of the PR workflow + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.event.number && github.head_ref || github.ref_name }} + cancel-in-progress: true + +on: + merge_group: + 
workflow_call: + inputs: + tag: + description: 'Release version (tag name)' + default: '' + required: true + type: string + push: + branches: + - master + - release/** + pull_request: + branches: + - master + - release/** +# TODO : https://github.com/neuronsimulator/nrn/issues/1063 +# paths-ignore: +# - '**.md' +# - '**.rst' +# - 'docs/**' +env: + MSYS2_ROOT: C:\msys64 + +jobs: + WindowsInstaller: + runs-on: windows-latest + timeout-minutes: 45 + + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ inputs.tag }} + + - name: Retrieve rxd test data + run: | + git submodule update --init test/rxd/testdata + shell: powershell + working-directory: ${{runner.workspace}}\nrn + + - name: Set up Python3 + uses: actions/setup-python@v5 + with: + python-version: '3.12' + + - name: Download Dependencies + run: | + .\nrn\ci\win_download_deps.cmd + shell: powershell + working-directory: ${{runner.workspace}} + + - name: Install Dependencies + run: .\nrn\ci\win_install_deps.cmd + shell: powershell + working-directory: ${{runner.workspace}} + + - name: Build and Create Installer + run: | + rm.exe C:\WINDOWS\system32\bash.EXE + %MSYS2_ROOT%\usr\bin\bash -lc "$BUILD_SOURCESDIRECTORY/ci/win_build_cmake.sh" + shell: cmd + working-directory: ${{runner.workspace}}\nrn + env: + BUILD_SOURCESDIRECTORY: ${{runner.workspace}}\nrn + + # This step will set up an SSH connection on tmate.io for live debugging. 
+ # To enable it, you have to:
+ # * add 'live-debug-win' to your PR title
+ # * push something to your PR branch (note that just re-running the pipeline disregards the title update)
+ - name: live debug session on failure (manual steps required, check `.github/workflows/windows.yml`)
+ if: failure() && contains(github.event.pull_request.title, 'live-debug-win')
+ uses: mxschmitt/action-tmate@v3
+
+ - name: Upload build artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: nrn-nightly-AMD64.exe
+ path: ${{runner.workspace}}\nrn\nrn-nightly-AMD64.exe
+
+ - name: Run installer and launch .hoc association test
+ run: .\ci\win_install_neuron.cmd
+ shell: cmd
+ working-directory: ${{runner.workspace}}\nrn
+
+ - name: Test Installer
+ run: .\ci\win_test_installer.cmd
+ shell: cmd
+ working-directory: ${{runner.workspace}}\nrn
+
+ - name: Publish Release Installer
+ working-directory: ${{runner.workspace}}\nrn
+ if: inputs.tag != ''
+ run: |
+ gh release upload ${{ inputs.tag }} nrn-nightly-AMD64.exe
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 97b1f60f63..4c63865634 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -34,8 +34,16 @@ stages: vmImage: 'ubuntu-20.04' strategy: matrix: + Python38: + python.version: '3.8' Python39: python.version: '3.9' + Python310: + python.version: '3.10' + Python311: + python.version: '3.11' + Python312: + python.version: '3.12' steps: # Secure files documentation: @@ -73,6 +81,81 @@ - template: ci/azure-wheel-test-upload.yml + # Jobs to build OSX wheels natively + - job: 'MacOSWheels' + timeoutInMinutes: 60 + pool: + vmImage: 'macOS-13' + strategy: + matrix: + Python38: + python.version: '3.8' + python.org.version: '3.8.9' + python.installer.name: 'macosx10.9.pkg' + Python39: + python.version: '3.9' + python.org.version: '3.9.13' + python.installer.name: 'macos11.pkg' + Python310: + python.version: '3.10' + python.org.version: '3.10.11' + 
python.installer.name: 'macos11.pkg' + Python311: + python.version: '3.11' + python.org.version: '3.11.7' + python.installer.name: 'macos11.pkg' + Python312: + python.version: '3.12' + python.org.version: '3.12.0' + python.installer.name: 'macos11.pkg' + + steps: + + - script: | + installer=python-$(python.org.version)-$(python.installer.name) + url=https://www.python.org/ftp/python/$(python.org.version)/$installer + curl $url -o $installer + sudo installer -pkg $installer -target / + displayName: 'Install Python from python.org' + + - script: | + brew install --cask xquartz + brew install flex bison mpich + brew unlink mpich && brew install openmpi + cmake --version + # see https://github.com/BlueBrain/CoreNeuron/issues/817, uninstall libomp until we fix this + # as we are building wheels, we shouldn't enable OpenMP here anyway + brew uninstall --ignore-dependencies libomp || echo "libomp doesn't exist" + displayName: 'Install OSX System Dependencies' + + # readline has been manually built with ncurses and MACOSX_DEPLOYMENT_TARGET=10.9 and stored as secure file on Azure. + # See `packaging/python/Dockerfile` for build instructions. + # + # Secure files documentation: + # https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops + # NOTE: when uploading new secure files, access must be permitted from the Azure pipeline interface (check message there) + - task: DownloadSecureFile@1 + name: readlineSF + displayName: 'Download readline secure file' + inputs: + secureFile: 'readline7.0-ncurses6.4.tar.gz' + + # 10.14 is required for full C++17 support according to + # https://cibuildwheel.readthedocs.io/en/stable/cpp_standards, but it + # seems that 10.15 is actually needed for std::filesystem::path. 
+ - script: | + export MACOSX_DEPLOYMENT_TARGET=10.15 + export PATH=/usr/local/opt/flex/bin:/usr/local/opt/bison/bin:$PATH + export SDKROOT=$(xcrun --sdk macosx --show-sdk-path) + export NRN_BUILD_FOR_UPLOAD=1 + sudo mkdir -p /opt/nrnwheel/$(uname -m) + sudo tar -zxf $(readlineSF.secureFilePath) --directory /opt/nrnwheel/$(uname -m) + packaging/python/build_wheels.bash osx $(python.version) coreneuron + displayName: 'Build MacOS Wheel' + + - template: ci/azure-wheel-test-upload.yml + + - stage: Final jobs: - job: AzureDropURL