From 66b0c66097825238a2390a855f9ccf1f128ea471 Mon Sep 17 00:00:00 2001 From: Artem Sokolov Date: Thu, 12 May 2022 15:40:53 -0400 Subject: [PATCH] CI/CD update (#12) * Building parallel Docker images * CI rough draft * Updated Dockerfile for the large container image --- .github/workflows/ci.yml | 57 +++++++++++++++++++++++++++++++++ .github/workflows/dockerhub.yml | 16 +++++++-- large/Dockerfile | 11 ++++++- 3 files changed, 80 insertions(+), 4 deletions(-) create mode 100644 .github/workflows/ci.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..811c723 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,57 @@ +name: CI +on: [push, pull_request, workflow_dispatch] + +jobs: + test: + runs-on: ubuntu-latest + env: + IMG: exemplar-001-cycle6.ome.tif + PMAP: exemplar-001-cycle6_Probabilities_1.tif + steps: + - uses: actions/checkout@v3 + + - name: Build the Docker container + run: | + docker build -t s3seg:test . + docker build -t s3seg:test-large large/ + + # Cache test data to avoid repeated download + - uses: actions/cache@v3 + id: cache-data + with: + path: ~/data/*.tif + key: testdata-2022-05-12 + + # Download test data only if no cache is present + - name: Test data download + if: steps.cache-data.outputs.cache-hit != 'true' + run: | + mkdir ~/data + cd ~/data + curl -f -o $IMG "https://mcmicro.s3.amazonaws.com/ci/$IMG" + curl -f -o $PMAP "https://mcmicro.s3.amazonaws.com/ci/$PMAP" + + - name: Test the default container + run: | + cd ~/data + rm -rf exemplar-001-cycle6 + docker run -v "$PWD":/data s3seg:test /bin/bash -c "cd /data; \ + python /app/S3segmenter.py --imagePath $IMG --stackProbPath $PMAP --outputPath ." 
+ + - name: Test the large container + run: | + cd ~/data + rm -rf large + mkdir large + docker run -v "$PWD":/data s3seg:test-large /bin/bash -c "cd /data; \ + python /app/S3segmenter.py --imagePath $IMG --stackProbPath $PMAP --outputPath large" + + # If the action is successful, the output will be available as a downloadable artifact + - name: Upload processed result + uses: actions/upload-artifact@v3 + with: + name: ex001-s3seg + path: | + ~/data/exemplar-001-cycle6/** + ~/data/large/** + \ No newline at end of file diff --git a/.github/workflows/dockerhub.yml b/.github/workflows/dockerhub.yml index 828acbe..0e7b127 100644 --- a/.github/workflows/dockerhub.yml +++ b/.github/workflows/dockerhub.yml @@ -21,12 +21,22 @@ jobs: username: ${{ secrets.MCMICRO_USERNAME }} password: ${{ secrets.MCMICRO_TOKEN }} - - name: Build and push - id: docker_build + name: Build and push the standard image + id: docker_build_standard uses: docker/build-push-action@v2 with: push: true tags: labsyspharm/s3segmenter:${{ github.event.release.tag_name }} + - + name: Build and push the large-scale image + id: docker_build_large + uses: docker/build-push-action@v2 + with: + context: large + push: true + tags: labsyspharm/s3segmenter:${{ github.event.release.tag_name }}-large - name: Image digest - run: | + echo ${{ steps.docker_build_standard.outputs.digest }} + echo ${{ steps.docker_build_large.outputs.digest }} diff --git a/large/Dockerfile b/large/Dockerfile index b472d0a..799258f 100644 --- a/large/Dockerfile +++ b/large/Dockerfile @@ -1,6 +1,15 @@ FROM python:3.10 -RUN pip install scikit-learn scikit-image opencv-python tifffile imagecodecs dask-image zarr ome_types +RUN python -m pip install \ + dask[dataframe] \ + scikit-learn \ + scikit-image \ + opencv-python-headless \ + tifffile \ + imagecodecs \ + dask-image \ + zarr \ + ome_types COPY S3segmenter.py ./app/S3segmenter.py COPY save_tifffile_pyramid.py 
./app/save_tifffile_pyramid.py