diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
index 02cf2bd461..5ce9b4ccae 100644
--- a/.git-blame-ignore-revs
+++ b/.git-blame-ignore-revs
@@ -1,3 +1,6 @@
+# prettier always
+f9516aca57f53732806778ab261ae0d18d9daab8
+
 # prettier again
 d6b71d6e4c075792f7885b4acb747049e496dc76
 
diff --git a/.github/ISSUE_TEMPLATE/rapport-de-bug.md b/.github/ISSUE_TEMPLATE/rapport-de-bug.md
index d65f3ce9f9..97ea13eeb5 100644
--- a/.github/ISSUE_TEMPLATE/rapport-de-bug.md
+++ b/.github/ISSUE_TEMPLATE/rapport-de-bug.md
@@ -13,8 +13,8 @@ Version de GeoNature affectée par le bug.
 **Description du bug**
 Description du problème rencontré (message d’erreur, code HTTP de retour inattendu, …).
 
-**Comportement attendue**
-Description du comportement attendue en lieu et place du problème rencontré.
+**Comportement attendu**
+Description du comportement attendu en lieu et place du problème rencontré.
 
 **Comment reproduire**
 Étapes à suivre pour reproduire le problème (sur quelle page se rendre, sur quel bouton cliquer, avec quelles données présentes, …).
diff --git a/.github/workflows/cypress.yml b/.github/workflows/cypress.yml
index 6a04114c2a..cbdb334fb3 100644
--- a/.github/workflows/cypress.yml
+++ b/.github/workflows/cypress.yml
@@ -5,6 +5,7 @@ on:
       - master
       - hotfixes
       - develop
+      - fixtestfront
   pull_request:
     branches:
       - master
@@ -37,19 +38,16 @@ jobs:
           psql -h localhost -U geonatadmin -d geonature2db -tc 'CREATE EXTENSION "postgis_raster";'
         env:
           PGPASSWORD: geonatpasswd
+      - uses: actions/checkout@v3
+        with:
+          submodules: recursive
       - name: Add database extensions
        run: |
-          psql -h localhost -U geonatadmin -d geonature2db -tc 'CREATE EXTENSION "hstore";'
-          psql -h localhost -U geonatadmin -d geonature2db -tc 'CREATE EXTENSION "uuid-ossp";'
-          psql -h localhost -U geonatadmin -d geonature2db -tc 'CREATE EXTENSION "pg_trgm";'
-          psql -h localhost -U geonatadmin -d geonature2db -tc 'CREATE EXTENSION "unaccent";'
+          psql -h localhost -U geonatadmin -d geonature2db -f install/assets/db/add_pg_extensions.sql
         env:
           PGPASSWORD: geonatpasswd
-      - uses: actions/checkout@v2
-        with:
-          submodules: recursive
       - name: Set up Python 3.9
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v4
         with:
           python-version: 3.9
           # cache: 'pip'
@@ -59,7 +57,7 @@
          sudo apt install -y libgdal-dev
 
       - name: Cache dependencies
-        uses: actions/cache@v2
+        uses: actions/cache@v3
         with:
           path: ~/.cache/pip
           key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements-dev.txt') }}
@@ -75,47 +73,41 @@
         working-directory: ./backend
       - name: Install database
         run: |
-          geonature db upgrade geonature@head -x local-srid=2154
-          geonature db autoupgrade -x local-srid=2154
-          geonature db upgrade geonature-samples@head
-          geonature db upgrade taxhub@head
-          geonature taxref import-v15 --skip-bdc-statuts
-          geonature db upgrade nomenclatures_taxonomie_data@head
-          geonature db upgrade ref_geo_fr_departments@head
-
-        env:
-          GEONATURE_CONFIG_FILE: config/test_config.toml
-      - name: Install core modules
-        run: |
-          geonature install-gn-module contrib/occtax OCCTAX --build=false
-          geonature db upgrade occtax-samples-test@head
-          geonature install-gn-module contrib/gn_module_occhab OCCHAB --build=false
-          geonature db upgrade occhab-samples@head
-          geonature install-gn-module contrib/gn_module_validation VALIDATION --build=false
+          install/03b_populate_db_for_test.sh
         env:
           GEONATURE_CONFIG_FILE: config/test_config.toml
       # FRONTEND
       - name: Cache node modules
-        uses: actions/cache@v1
+        uses: actions/cache@v3
         with:
           path: ~/.npm
           key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
           restore-keys: |
             ${{ runner.os }}-node-
       - name: Node ${{ matrix.node-version }}
-        uses: actions/setup-node@v1
+        uses: actions/setup-node@v3
         with:
           node-version: ${{ matrix.node-version }}
-      - name: install frontend
+      - name: generate frontend config
         run: |
           cp ./config/settings.ini.sample ./config/settings.ini
           geonature generate_frontend_config
           ./install/05_install_frontend.sh --ci
         env:
-          GEONATURE_CONFIG_FILE: config/test_config.toml
+          GEONATURE_CONFIG_FILE: "${{ github.workspace }}/config/test_config.toml"
       - name: Show generated frontend config
         run: |
           cat ./frontend/src/conf/app.config.ts
+      - name: Install core modules
+        run: |
+          geonature install-gn-module contrib/occtax OCCTAX --build=false
+          geonature db upgrade occtax-samples-test@head
+          geonature install-gn-module contrib/gn_module_occhab OCCHAB --build=false
+          geonature db upgrade occhab-samples@head
+          geonature install-gn-module contrib/gn_module_validation VALIDATION --build=false
+          geonature permissions supergrant --group --nom "Grp_admin" --yes
+        env:
+          GEONATURE_CONFIG_FILE: config/test_config.toml
       - name: Run GeoNature backend
         run: geonature dev_back &
         env:
@@ -127,14 +119,13 @@
         TAXHUB_SETTINGS: test_config.py
         TAXHUB_SQLALCHEMY_DATABASE_URI: "postgresql://geonatadmin:geonatpasswd@127.0.0.1:5432/geonature2db"
       - name: Cypress run
-        uses: cypress-io/github-action@v4
+        uses: cypress-io/github-action@v5
         with:
-          record: ${{ fromJSON(github.ref_name == 'master') }}
+          record: false
           working-directory: ./frontend
           start: npm run start
           wait-on: http://127.0.0.1:4200
           headless: true
         env:
-          is_master:
           CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index 7e89d213a7..e1f2ca2d4e 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -11,9 +11,6 @@ jobs:
   build-backend:
     name: Build backend image
     runs-on: ubuntu-latest
-    outputs:
-      image_name: ${{ fromJSON(steps.build-prod.outputs.metadata)['image.name'] }}
-      image_digest: ${{ steps.build-prod.outputs.digest }}
     steps:
       -
        name: Checkout
@@ -45,7 +42,7 @@
       -
         name: Build and export wheels image
         id: build-wheels
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v4
         with:
           context: .
           file: ./backend/Dockerfile
@@ -65,66 +62,18 @@
       -
         name: Build and export prod image
         id: build-prod
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v4
         with:
           context: .
           file: ./backend/Dockerfile
-          target: prod-full
+          target: prod
           push: ${{ github.event_name != 'pull_request' }}
           tags: ${{ steps.meta-prod.outputs.tags }}
           labels: ${{ steps.meta-prod.outputs.labels }}
 
-  frontend-config:
-    name: Generate frontend config
-    runs-on: ubuntu-latest
-    needs: build-backend
-    container: ${{ needs.build-backend.outputs.image_name }}@${{ needs.build-backend.outputs.image_digest }}
-    steps:
-      -
-        name: Checkout
-        uses: actions/checkout@v3
-
-      -
-        name: Generate frontend configuration
-        run: |
-          geonature generate-frontend-config --input frontend/src/conf/app.config.ts.sample --output frontend/src/conf/app.config.ts
-          geonature generate-frontend-module-config OCCTAX --output contrib/occtax/frontend/app/module.config.ts
-          geonature generate-frontend-module-config OCCHAB --output contrib/gn_module_occhab/frontend/app/module.config.ts
-          geonature generate-frontend-module-config VALIDATION --output contrib/gn_module_validation/frontend/app/module.config.ts
-        env:
-          GEONATURE_SQLALCHEMY_DATABASE_URI: "postgresql://pguser:pgpass@localhost:5432/dbname"
-          GEONATURE_SECRET_KEY: "this is in fact not used in this action"
-          GEONATURE_API_ENDPOINT: "http://127.0.0.1:8000"
-          GEONATURE_API_TAXHUB: "http://127.0.0.1:5000/api"
-          GEONATURE_URL_APPLICATION: "http://127.0.0.1:4200"
-
-      -
-        name: Archive GeoNature frontend configuration
-        uses: actions/upload-artifact@v3
-        with:
-          name: frontend-config-geonature
-          path: frontend/src/conf/app.config.ts
-
-      -
-        name: Archive OccTax frontend configuration
-        uses: actions/upload-artifact@v3
-        with:
-          name: frontend-config-occtax
-          path: contrib/occtax/frontend/app/module.config.ts
-
-      -
-        name: Archive OccHab frontend configuration
-        uses: actions/upload-artifact@v3
-        with:
-          name: frontend-config-occhab
-          path: contrib/gn_module_occhab/frontend/app/module.config.ts
-
-      -
-        name: Archive Validation frontend config
-        uses: actions/upload-artifact@v3
-        with:
-          name: frontend-config-validation
-          path: contrib/gn_module_validation/frontend/app/module.config.ts
-
   build-frontend:
     name: Build frontend image
     runs-on: ubuntu-latest
-    needs: frontend-config
     steps:
       -
        name: Checkout
@@ -132,56 +81,74 @@
         with:
           submodules: True
 
       -
-        name: Download GeoNature frontend configuartion
-        uses: actions/download-artifact@v3
-        with:
-          name: frontend-config-geonature
-          path: frontend/src/conf/
+        name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
 
       -
-        name: Download OccTax frontend configuartion
-        uses: actions/download-artifact@v3
+        name: Login to GHCR
+        uses: docker/login-action@v2
+        if: github.event_name != 'pull_request'
         with:
-          name: frontend-config-occtax
-          path: contrib/occtax/frontend/app/
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
 
       -
-        name: Download OccHab frontend configuartion
-        uses: actions/download-artifact@v3
+        name: Generate metadata for source image
+        id: meta-source
+        uses: docker/metadata-action@v4
         with:
-          name: frontend-config-occhab
-          path: contrib/gn_module_occhab/frontend/app/
+          images: ghcr.io/${{ github.repository_owner }}/geonature-frontend
+          flavor: |
+            suffix=-source
+          tags: |
+            type=ref,event=branch
+            type=ref,event=tag
 
       -
-        name: Download Validation frontend configuartion
-        uses: actions/download-artifact@v3
+        name: Build and export source image
+        uses: docker/build-push-action@v4
         with:
-          name: frontend-config-validation
-          path: contrib/gn_module_validation/frontend/app/
-
-      -
-        name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
+          context: .
+          file: ./frontend/Dockerfile
+          target: source
+          push: ${{ github.event_name != 'pull_request' }}
+          tags: ${{ steps.meta-source.outputs.tags }}
+          labels: ${{ steps.meta-source.outputs.labels }}
 
       -
-        name: Docker meta
-        id: meta
+        name: Generate metadata for nginx image
+        id: meta-nginx
         uses: docker/metadata-action@v4
         with:
           images: ghcr.io/${{ github.repository_owner }}/geonature-frontend
+          flavor: |
+            suffix=-nginx
           tags: |
             type=ref,event=branch
             type=ref,event=tag
 
       -
-        name: Login to GHCR
-        uses: docker/login-action@v2
-        if: github.event_name != 'pull_request'
+        name: Build and export nginx image
+        uses: docker/build-push-action@v4
         with:
-          registry: ghcr.io
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
+          context: .
+          file: ./frontend/Dockerfile
+          target: prod-base
+          push: ${{ github.event_name != 'pull_request' }}
+          tags: ${{ steps.meta-nginx.outputs.tags }}
+          labels: ${{ steps.meta-nginx.outputs.labels }}
+
+      -
+        name: Generate metadata for prod image
+        id: meta-prod
+        uses: docker/metadata-action@v4
+        with:
+          images: ghcr.io/${{ github.repository_owner }}/geonature-frontend
+          tags: |
+            type=ref,event=branch
+            type=ref,event=tag
 
       -
-        name: Build and export
-        uses: docker/build-push-action@v3
+        name: Build and export prod image
+        uses: docker/build-push-action@v4
         with:
           context: .
           file: ./frontend/Dockerfile
-          target: prod-full
-          push: false
-          tags: ${{ steps.meta.outputs.tags }}
-          labels: ${{ steps.meta.outputs.labels }}
+          target: prod
+          push: ${{ github.event_name != 'pull_request' }}
+          tags: ${{ steps.meta-prod.outputs.tags }}
+          labels: ${{ steps.meta-prod.outputs.labels }}
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 0fa016fa67..051174e220 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -6,16 +6,19 @@ jobs:
   backend:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
       - name: Backend code formatting check (Black)
         uses: psf/black@stable
 
   frontend:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
-      - name: Frontend code formatting check (Prettier)
-        uses: creyD/prettier_action@v4.2
+      - uses: actions/checkout@v3
         with:
-          dry: True
-          prettier_options: --config frontend/.prettierrc --check frontend/src/app/**/*.ts
+          fetch-depth: 0
+      - uses: actions/setup-node@v3
+      - name: Frontend code formatting check (Prettier)
+        run: npm install prettier && npm run format:check
+        working-directory: ./frontend
\ No newline at end of file
diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml
index 545baa7517..d533b3d286 100644
--- a/.github/workflows/pytest.yml
+++ b/.github/workflows/pytest.yml
@@ -19,17 +19,22 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
+        debian-version: [ '10', '11', '12' ]
         include:
-          - name: "Debian 10"
-            python-version: "3.7"
-            postgres-version: 11
-            postgis-version: 2.5
-          - name: "Debian 11"
-            python-version: "3.9"
-            postgres-version: 13
-            postgis-version: 3.2
+          - debian-version: '10'
+            python-version: '3.7'
+            postgres-version: '11'
+            postgis-version: '2.5'
+          - debian-version: '11'
+            python-version: '3.9'
+            postgres-version: '13'
+            postgis-version: '3.2'
+          - debian-version: '12'
+            python-version: '3.11'
+            postgres-version: '15'
+            postgis-version: '3.3'
 
-    name: ${{ matrix.name }}
+    name: Debian ${{ matrix.debian-version }}
 
     services:
       postgres:
@@ -53,19 +58,16 @@ jobs:
           psql -h localhost -U geonatadmin -d geonature2db -tc 'CREATE EXTENSION "postgis_raster";'
         env:
           PGPASSWORD: geonatpasswd
+      - uses: actions/checkout@v3
+        with:
+          submodules: true
       - name: Add database extensions
         run: |
-          psql -h localhost -U geonatadmin -d geonature2db -tc 'CREATE EXTENSION "hstore";'
-          psql -h localhost -U geonatadmin -d geonature2db -tc 'CREATE EXTENSION "uuid-ossp";'
-          psql -h localhost -U geonatadmin -d geonature2db -tc 'CREATE EXTENSION "pg_trgm";'
-          psql -h localhost -U geonatadmin -d geonature2db -tc 'CREATE EXTENSION "unaccent";'
+          psql -h localhost -U geonatadmin -d geonature2db -f install/assets/db/add_pg_extensions.sql
         env:
           PGPASSWORD: geonatpasswd
-      - uses: actions/checkout@v2
-        with:
-          submodules: true
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v3
+        uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
           cache: "pip"
@@ -87,20 +89,7 @@
           GEONATURE_CONFIG_FILE: config/test_config.toml
       - name: Install database
         run: |
-          geonature db upgrade geonature@head -x local-srid=2154
-          geonature db autoupgrade -x local-srid=2154
-          geonature db upgrade ref_geo_fr_departments@head
-          geonature taxref import-v15
-          geonature db upgrade geonature-samples@head
-          geonature db upgrade nomenclatures_taxonomie_data@head
-          geonature db upgrade ref_geo_fr_municipalities@head
-          geonature db upgrade ref_geo_inpn_grids_5@head
-          geonature sensitivity add-referential \
-            --source-name "Référentiel sensibilité TAXREF v15 20220331" \
-            --url https://inpn.mnhn.fr/docs-web/docs/download/401875 \
-            --zipfile RefSensibiliteV15_20220331.zip \
-            --csvfile RefSensibilite_V15_31032022/RefSensibilite_15.csv \
-            --encoding=iso-8859-15
+          install/03b_populate_db_for_test.sh
         env:
           GEONATURE_CONFIG_FILE: config/test_config.toml
       - name: Show database status
@@ -129,7 +118,7 @@
         env:
           GEONATURE_CONFIG_FILE: config/test_config.toml
       - name: Upload coverage to Codecov
-        if: ${{ matrix.name == 'Debian 11' }}
-        uses: codecov/codecov-action@v2
+        if: ${{ matrix.debian-version == '11' }}
+        uses: codecov/codecov-action@v3
         with:
           flags: pytest
diff --git a/.github/workflows/sphinx.yml b/.github/workflows/sphinx.yml
index 4dba45e2fb..24274dd820 100644
--- a/.github/workflows/sphinx.yml
+++ b/.github/workflows/sphinx.yml
@@ -24,7 +24,7 @@ jobs:
           --health-timeout 5s
           --health-retries 5
     steps:
-      - uses: actions/setup-python@v3
+      - uses: actions/setup-python@v4
        with:
          python-version: 3.9
      - uses: actions/checkout@master
diff --git a/.gitignore b/.gitignore
index ec5593b5fa..0e74f5cb0d 100755
--- a/.gitignore
+++ b/.gitignore
@@ -1,6 +1,6 @@
 *.*~
 settings.ini
-config/geonature_config.toml
+config/*_config.toml
 **/conf_gn_module.toml
 **/module.config.ts
 
@@ -8,8 +8,6 @@ config/geonature_config.toml
 log/*
 .vscode/
 geonature_config.toml
-external_assets/
-frontend/external_modules/*
 tmp/
 var/
 data/migrations/v1tov2/old/v1_compat.ini
@@ -17,13 +15,11 @@ data/migrations/v1tov2/old/v1_compat.log
 data/migrations/v1tov2/migratetoV2.ini
 config.py
 
-backend/static/medias/*
-backend/static/shapefiles/*
-backend/static/geopackages/*
 backend/static/node_modules
-backend/static/configs/*
+backend/media
 backend/run.py
 
+cache/*
 
 .vscode
@@ -101,13 +97,6 @@ instance/
 
 # Sphinx documentation
 docs/_build/
 
-#frontend doc
-
-#frontend test
-frontend/cypress/videos
-frontend/cypress/screenshots
-frontend/coverage
-frontend/.nyc_output/out.json
 
 # PyBuilder
 target/
@@ -129,7 +118,7 @@ celerybeat-schedule
 
 # virtualenv
 .venv
-venv/
+venv*/
 ENV/
 
 # Spyder project settings
@@ -154,11 +143,9 @@ install_all/install_all.log
 
 # Pycharm
 .idea/
 
-backend/static/images/taxa.png
-
 *.swp
 *.swo
 
 /docs/CHANGELOG.html
-frontend/coverage/clover.xml
-frontend/.nyc_output/out.json
+
+/contrib/*/frontend/node_modules
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 8eed601bb7..a69db15be2 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/psf/black
-    rev: 22.3.0
+    rev: 23.1.0
    hooks:
      - id: black
        language_version: python3.9
diff --git a/README.md b/README.md
index 16474d0661..90332dee15 100644
--- a/README.md
+++ b/README.md
@@ -12,10 +12,10 @@ technologies :
 - Python
 - Flask
 - Leaflet
-- Angular 7
+- Angular
 - Bootstrap
 
-Documentation sur <http://docs.geonature.fr>.
+Documentation sur <https://docs.geonature.fr>.
 
 **Application de saisie, de gestion, de synthèse et de diffusion
 d\'observations faune et flore.**
@@ -133,9 +133,9 @@ Licence
 =======
 
 - OpenSource - GPL-3.0
-- Copyleft 2014-2022 - Parc National des Écrins - Parc national des
+- Copyleft 2014-2023 - Parc National des Écrins - Parc national des
   Cévennes
 
-[![image](http://geonature.fr/img/logo-pne.jpg)](http://www.ecrins-parcnational.fr)
+[![image](https://geonature.fr/img/logo-pne.jpg)](https://www.ecrins-parcnational.fr)
 
-[![image](http://geonature.fr/img/logo-pnc.jpg)](http://www.cevennes-parcnational.fr)
+[![image](https://geonature.fr/img/logo-pnc.jpg)](https://www.cevennes-parcnational.fr)
diff --git a/VERSION b/VERSION
index 9e5bb77a3b..53fdb123b9 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-2.11.2
+2.13.1
\ No newline at end of file
diff --git a/backend/Dockerfile b/backend/Dockerfile
index f462f3ca33..3a2808fbe7 100644
--- a/backend/Dockerfile
+++ b/backend/Dockerfile
@@ -6,7 +6,6 @@ ENV PIP_ROOT_USER_ACTION=ignore
 RUN --mount=type=cache,target=/root/.cache \
     pip install --upgrade pip setuptools wheel
 
-
 FROM build AS build-habref
 WORKDIR /build/
 COPY /backend/dependencies/Habref-api-module .
@@ -107,7 +106,7 @@ RUN --mount=type=cache,target=/root/.npm \
     npm ci --omit=dev
 
 
-FROM python:3.9-bullseye AS wheels
+FROM python:3.9-bullseye AS wheels-light
 
 ENV PIP_ROOT_USER_ACTION=ignore
 RUN --mount=type=cache,target=/root/.cache \
@@ -117,6 +116,10 @@ WORKDIR /dist
 ENV GEONATURE_STATIC_FOLDER=/dist/static/
 COPY /backend/static/ ./static/
 COPY --from=node /dist/node_modules/ ./static/node_modules/
+ENV GEONATURE_CUSTOM_STATIC_FOLDER=/dist/custom/
+RUN mkdir custom
+ENV GEONATURE_MEDIA_FOLDER=/dist/media/
+RUN mkdir -p media/attachments
 
 WORKDIR /dist/geonature
 
@@ -135,14 +138,31 @@
 COPY --from=build-utils /build/dist/*.whl .
 COPY --from=build-utils-geo /build/dist/*.whl .
 COPY --from=build-geonature /build/dist/*.whl .
 
+COPY --chmod=755 /install/03b_populate_db.sh /populate_db.sh
+COPY --chmod=755 /install/assets/docker_entrypoint.sh /entrypoint.sh
+
 ENV GEONATURE_CONFIG_FILE ""
 EXPOSE 8000
-CMD ["gunicorn", "geonature:create_app()", "--bind=0.0.0.0:8000", "--access-logfile=-"]
+ENTRYPOINT ["/entrypoint.sh"]
+CMD [ "gunicorn", "geonature:create_app()", \
+    "--name=geonature", \
+    "--workers=2", \
+    "--threads=2", \
+    "--access-logfile=-", \
+    "--bind=0.0.0.0:8000" \
+]
+
+FROM wheels-light AS wheels
 
-FROM wheels AS prod-light
+COPY --from=build-occtax /build/dist/*.whl .
+COPY --from=build-occhab /build/dist/*.whl .
+COPY --from=build-validation /build/dist/*.whl .
+
+
+FROM wheels-light AS prod-light
 
 WORKDIR /dist/geonature
 RUN --mount=type=cache,target=/root/.cache \
@@ -150,12 +170,9 @@
     pip install *.whl
 RUN rm -f *.whl
 
-FROM wheels AS prod-full
+FROM wheels AS prod
 
 WORKDIR /dist/geonature
-COPY --from=build-occtax /build/dist/*.whl .
-COPY --from=build-occhab /build/dist/*.whl .
-COPY --from=build-validation /build/dist/*.whl .
 RUN --mount=type=cache,target=/root/.cache \
     pip install *.whl
 RUN rm -f *.whl
diff --git a/backend/dependencies/Habref-api-module b/backend/dependencies/Habref-api-module
index 6a0c527eab..fc594b90e2 160000
--- a/backend/dependencies/Habref-api-module
+++ b/backend/dependencies/Habref-api-module
@@ -1 +1 @@
-Subproject commit 6a0c527eabfb14c5dbefe9dc099ce3db4137a269
+Subproject commit fc594b90e2f8174473d72be579b42b4f6a5860be
diff --git a/backend/dependencies/Nomenclature-api-module b/backend/dependencies/Nomenclature-api-module
index 50305242a4..f9102ca7c1 160000
--- a/backend/dependencies/Nomenclature-api-module
+++ b/backend/dependencies/Nomenclature-api-module
@@ -1 +1 @@
-Subproject commit 50305242a432a2ed5bd2a2ed0a9d5e66d39ac0f7
+Subproject commit f9102ca7c14d9cdf189f75b9d4754984a76503f7
diff --git a/backend/dependencies/RefGeo b/backend/dependencies/RefGeo
index 615fcff503..d17afaec89 160000
--- a/backend/dependencies/RefGeo
+++ b/backend/dependencies/RefGeo
@@ -1 +1 @@
-Subproject commit 615fcff503d015e3ea67e4697f827b1c54814e86
+Subproject commit d17afaec89dacf1edc47a64d629db64d07895907
diff --git a/backend/dependencies/TaxHub b/backend/dependencies/TaxHub
index f10e286a29..4855eec69b 160000
--- a/backend/dependencies/TaxHub
+++ b/backend/dependencies/TaxHub
@@ -1 +1 @@
-Subproject commit f10e286a2997d8fd44bf0f1c2ff94d9ad31bad56
+Subproject commit 4855eec69b698eda65dfcf24643e7e6ad99c8ebb
diff --git a/backend/dependencies/UsersHub b/backend/dependencies/UsersHub
index 08683e4c1c..37587da6d1 160000
--- a/backend/dependencies/UsersHub
+++ b/backend/dependencies/UsersHub
@@ -1 +1 @@
-Subproject commit 08683e4c1c9e90f7b246cb30f06f6167987745fa
+Subproject commit 37587da6d18acc44e6e38585ff3b0d959d8f7eb7
diff --git a/backend/dependencies/UsersHub-authentification-module b/backend/dependencies/UsersHub-authentification-module
index 70f3488f88..35427a1e80 160000
--- a/backend/dependencies/UsersHub-authentification-module
+++ b/backend/dependencies/UsersHub-authentification-module
@@ -1 +1 @@
-Subproject commit 70f3488f8845e2b5b05230e194880bcf1ba01219
+Subproject commit 35427a1e80c1e0fa00bd5e0ec7a007569ed5ddc3
diff --git a/backend/dependencies/Utils-Flask-SQLAlchemy b/backend/dependencies/Utils-Flask-SQLAlchemy
index 34fa739bf4..ef3bde348e 160000
--- a/backend/dependencies/Utils-Flask-SQLAlchemy
+++ b/backend/dependencies/Utils-Flask-SQLAlchemy
@@ -1 +1 @@
-Subproject commit 34fa739bf45559a8b6b6d6ace7d2c12bc4feecfa
+Subproject commit ef3bde348e86b8a69d1dbc0a7b87a843eb7973db
diff --git a/backend/dependencies/Utils-Flask-SQLAlchemy-Geo b/backend/dependencies/Utils-Flask-SQLAlchemy-Geo
index 0adaef60ff..2b5002bf13 160000
--- a/backend/dependencies/Utils-Flask-SQLAlchemy-Geo
+++ b/backend/dependencies/Utils-Flask-SQLAlchemy-Geo
@@ -1 +1 @@
-Subproject commit 0adaef60ffd3ef5e114a8a8192a81564012a73f1
+Subproject commit 2b5002bf13762d1c49b5d18516cd72d82861f12c
diff --git a/backend/geonature/app.py b/backend/geonature/app.py
index b66aac4441..b2a7b4a018 100755
--- a/backend/geonature/app.py
+++ b/backend/geonature/app.py
@@ -4,23 +4,37 @@
 import logging, warnings, os, sys
 from itertools import chain
-from pkg_resources import iter_entry_points, load_entry_point
 from importlib import import_module
+from packaging import version
 
-from flask import Flask, g, request, current_app
+if sys.version_info < (3, 10):
+    from importlib_metadata import entry_points
+else:
+    from importlib.metadata import entry_points
+
+from flask import Flask, g, request, current_app, send_from_directory
 from flask.json.provider import DefaultJSONProvider
 from flask_mail import Message
 from flask_cors import CORS
 from flask_sqlalchemy import before_models_committed
 from werkzeug.middleware.proxy_fix import ProxyFix
+from werkzeug.middleware.shared_data import SharedDataMiddleware
+from werkzeug.middleware.dispatcher import DispatcherMiddleware
+from werkzeug.wrappers import Response
 from psycopg2.errors import UndefinedTable
+import sqlalchemy as sa
 from sqlalchemy.exc import OperationalError, ProgrammingError
 from sqlalchemy.orm.exc import NoResultFound
-from sqlalchemy.engine import RowProxy
+
+if version.parse(sa.__version__) >= version.parse("1.4"):
+    from sqlalchemy.engine import Row
+else:  # retro-compatibility SQLAlchemy 1.3
+    from sqlalchemy.engine import RowProxy as Row
 
 from geonature.utils.config import config
 from geonature.utils.env import MAIL, DB, db, MA, migrate, BACKEND_DIR
 from geonature.utils.logs import config_loggers
+from geonature.utils.module import iter_modules_dist
 from geonature.core.admin.admin import admin
 from geonature.middlewares import SchemeFix, RequestID
@@ -39,15 +53,16 @@ def configure_alembic(alembic_config):
     'migrations' entry point value of the 'gn_module' group for all modules having such entry point.
     Thus, alembic will find migrations of all installed geonature modules.
     """
-    version_locations = alembic_config.get_main_option("version_locations", default="").split()
+    version_locations = set(
+        alembic_config.get_main_option("version_locations", default="").split()
+    )
     if "VERSION_LOCATIONS" in config["ALEMBIC"]:
-        version_locations.extend(config["ALEMBIC"]["VERSION_LOCATIONS"].split())
+        version_locations |= set(config["ALEMBIC"]["VERSION_LOCATIONS"].split())
     for entry_point in chain(
-        iter_entry_points("alembic", "migrations"), iter_entry_points("gn_module", "migrations")
+        entry_points(group="alembic", name="migrations"),
+        entry_points(group="gn_module", name="migrations"),
     ):
-        # TODO: define enabled module in configuration (skip disabled module, raise error on missing module)
-        _, migrations = str(entry_point).split("=", 1)
-        version_locations += [migrations.strip()]
+        version_locations.add(entry_point.value)
     alembic_config.set_main_option("version_locations", " ".join(version_locations))
     return alembic_config
@@ -55,11 +70,12 @@
 if config.get("SENTRY_DSN"):
     import sentry_sdk
     from sentry_sdk.integrations.flask import FlaskIntegration
+    from sentry_sdk.integrations.redis import RedisIntegration
     from sentry_sdk.integrations.celery import CeleryIntegration
 
     sentry_sdk.init(
         config["SENTRY_DSN"],
-        integrations=[FlaskIntegration(), CeleryIntegration()],
+        integrations=[FlaskIntegration(), RedisIntegration(), CeleryIntegration()],
         traces_sample_rate=1.0,
     )
@@ -67,20 +83,22 @@
 class MyJSONProvider(DefaultJSONProvider):
     @staticmethod
     def default(o):
-        if isinstance(o, RowProxy):
+        if isinstance(o, Row):
             return dict(o)
         return DefaultJSONProvider.default(o)
 
 
 def create_app(with_external_mods=True):
-    static_folder = os.environ.get("GEONATURE_STATIC_FOLDER", "../static")
-    app = Flask(__name__.split(".")[0], static_folder=static_folder)
+    app = Flask(
+        __name__.split(".")[0],
+        root_path=config["ROOT_PATH"],
+        static_folder=config["STATIC_FOLDER"],
+        static_url_path=config["STATIC_URL"],
+        template_folder="geonature/templates",
+    )
     app.config.update(config)
 
-    if "SCRIPT_NAME" not in os.environ:
-        os.environ["SCRIPT_NAME"] = app.config["APPLICATION_ROOT"].rstrip("/")
-
     # Enable deprecation warnings in debug mode
     if app.debug and not sys.warnoptions:
         warnings.filterwarnings(action="default", category=DeprecationWarning)
@@ -89,6 +107,19 @@ def create_app(with_external_mods=True):
     app.wsgi_app = SchemeFix(app.wsgi_app, scheme=config.get("PREFERRED_URL_SCHEME"))
     app.wsgi_app = ProxyFix(app.wsgi_app, x_host=1)
     app.wsgi_app = RequestID(app.wsgi_app)
+    if app.config["APPLICATION_ROOT"] != "/":
+        app.wsgi_app = DispatcherMiddleware(
+            Response("Not Found", status=404),
+            {app.config["APPLICATION_ROOT"].rstrip("/"): app.wsgi_app},
+        )
+
+    if config.get("CUSTOM_STATIC_FOLDER"):
+        app.wsgi_app = SharedDataMiddleware(
+            app.wsgi_app,
+            {
+                "/static": config["CUSTOM_STATIC_FOLDER"],
+            },
+        )
 
     app.json = MyJSONProvider(app)
@@ -131,13 +162,16 @@ def load_current_user():
             g.current_user = user_from_token(request.cookies["token"]).role
         except (KeyError, UnreadableAccessRightsError, AccessRightsExpiredError):
             g.current_user = None
+        g._permissions_by_user = {}
+        g._permissions = {}
 
     if config.get("SENTRY_DSN"):
         from sentry_sdk import set_tag, set_user
 
         @app.before_request
         def set_sentry_context():
-            set_tag("request.id", request.environ["FLASK_REQUEST_ID"])
+            if "FLASK_REQUEST_ID" in request.environ:
+                set_tag("request.id", request.environ["FLASK_REQUEST_ID"])
             if g.current_user:
                 set_user(
                     {
@@ -149,19 +183,24 @@ def set_sentry_context():
 
     admin.init_app(app)
 
+    # Enable serving of media files
+    app.add_url_rule(
+        f"{config['MEDIA_URL']}/<path:filename>",
+        view_func=lambda filename: send_from_directory(config["MEDIA_FOLDER"], filename),
+        endpoint="media",
+    )
+
     for blueprint_path, url_prefix in [
         ("pypnusershub.routes:routes", "/auth"),
         ("pypn_habref_api.routes:routes", "/habref"),
         ("pypnusershub.routes_register:bp", "/pypn/register"),
         ("pypnnomenclature.routes:routes", "/nomenclatures"),
+        ("ref_geo.routes:routes", "/geo"),
         ("geonature.core.gn_commons.routes:routes", "/gn_commons"),
         ("geonature.core.gn_permissions.routes:routes", "/permissions"),
-        ("geonature.core.gn_permissions.backoffice.views:routes", "/permissions_backoffice"),
-        ("geonature.core.routes:routes", "/"),
         ("geonature.core.users.routes:routes", "/users"),
         ("geonature.core.gn_synthese.routes:routes", "/synthese"),
         ("geonature.core.gn_meta.routes:routes", "/meta"),
-        ("geonature.core.ref_geo.routes:routes", "/geo"),
         ("geonature.core.auth.routes:routes", "/gn_auth"),
         ("geonature.core.gn_monitoring.routes:routes", "/gn_monitoring"),
         ("geonature.core.gn_profiles.routes:routes", "/gn_profiles"),
@@ -178,14 +217,12 @@
 
     # Loading third-party modules
     if with_external_mods:
-        for module_code_entry in iter_entry_points("gn_module", "code"):
-            module_code = module_code_entry.resolve()
+        for module_dist in iter_modules_dist():
+            module_code = module_dist.entry_points["code"].load()
             if module_code in config["DISABLED_MODULES"]:
                 continue
             try:
-                module_blueprint = load_entry_point(
-                    module_code_entry.dist, "gn_module", "blueprint"
-                )
+                module_blueprint = module_dist.entry_points["blueprint"].load()
             except Exception as e:
                 logging.exception(e)
                 logging.warning(f"Unable to load module {module_code}, skipping…")
diff --git a/backend/geonature/celery_app.py b/backend/geonature/celery_app.py
index 9dd146e9fc..de70b720c8 100644
--- a/backend/geonature/celery_app.py
+++ b/backend/geonature/celery_app.py
@@ -1,7 +1,6 @@
-from pkg_resources import iter_entry_points
-
 from .app import create_app
 from .utils.celery import celery_app as app
+from .utils.module import iter_modules_dist
 
 flask_app = create_app()
@@ -15,4 +14,6 @@ def __call__(self, *args, **kwargs):
 
 app.Task = ContextTask
 
-app.conf.imports += tuple(ep.module_name for ep in iter_entry_points("gn_module", "tasks"))
+app.conf.imports += tuple(
+    [ep.module for dist in iter_modules_dist() for ep in dist.entry_points.select(name="tasks")]
+)
diff --git a/backend/geonature/core/admin/__init__.py b/backend/geonature/core/admin/__init__.py
index e69de29bb2..c31328c68b 100644
--- a/backend/geonature/core/admin/__init__.py
+++ b/backend/geonature/core/admin/__init__.py
@@ -0,0 +1 @@
+import geonature.core.gn_permissions.admin
diff --git a/backend/geonature/core/admin/admin.py b/backend/geonature/core/admin/admin.py
index b1e7bf0694..125b714d18 100644
--- a/backend/geonature/core/admin/admin.py
+++ b/backend/geonature/core/admin/admin.py
@@ -6,8 +6,8 @@
 from geonature.utils.env import db
 from geonature.utils.config import config
-from geonature.core.gn_commons.models import TAdditionalFields
-from geonature.core.gn_commons.admin import BibFieldAdmin
+from geonature.core.gn_commons.models import TAdditionalFields, TMobileApps, TModules
+from geonature.core.gn_commons.admin import BibFieldAdmin, TMobileAppsAdmin, TModulesAdmin
 from geonature.core.notifications.admin import (
     NotificationTemplateAdmin,
     NotificationCategoryAdmin,
@@ -18,100 +18,42 @@
     NotificationCategory,
     NotificationMethod,
 )
-from geonature.core.gn_permissions.tools import get_scopes_by_action
+from pypnnomenclature.models import (
+    BibNomenclaturesTypes,
+    TNomenclatures,
+)
 from pypnnomenclature.admin import (
-    BibNomenclaturesTypesAdminConfig,
     BibNomenclaturesTypesAdmin,
-    TNomenclaturesAdminConfig,
     TNomenclaturesAdmin,
 )
 
-
-class MyHomeView(AdminIndexView):
-    def is_accessible(self):
-        if g.current_user is None:
-            raise Unauthorized  # return False leads to Forbidden which is different
-        return True
+from .utils import CruvedProtectedMixin
 
 
-class CruvedProtectedMixin:
+class MyHomeView(AdminIndexView):
     def is_accessible(self):
         if g.current_user is None:
             raise Unauthorized  # return False leads to Forbidden which is different
         return True
 
-    def _can_action(self, action):
-        scope = get_scopes_by_action(
-            g.current_user.id_role, module_code=self.module_code, object_code=self.object_code
-        )[action]
-        return scope == 3
-
-    @property
-    def can_create(self):
-        return self._can_action("C")
-
-    @property
-    def can_edit(self):
-        return self._can_action("U")
-
-    @property
-    def can_delete(self):
-        return self._can_action("D")
-
-    @property
-    def can_export(self):
-        return self._can_action("E")
-
-
-class ProtectedBibNomenclaturesTypesAdminConfig(
+class ProtectedBibNomenclaturesTypesAdmin(
     CruvedProtectedMixin,
-    BibNomenclaturesTypesAdminConfig,
+    BibNomenclaturesTypesAdmin,
 ):
     module_code = "ADMIN"
     object_code = "NOMENCLATURES"
 
 
-class ProtectedTNomenclaturesAdminConfig(
+class ProtectedTNomenclaturesAdmin(
     CruvedProtectedMixin,
-    TNomenclaturesAdminConfig,
+    TNomenclaturesAdmin,
 ):
     module_code = "ADMIN"
     object_code = "NOMENCLATURES"
 
 
-class ProtectedBibFieldAdmin(
-    CruvedProtectedMixin,
-    BibFieldAdmin,
-):
-    module_code = "ADMIN"
-    object_code = "ADDITIONAL_FIELDS"
-
-
-class ProtectedNotificationTemplateAdmin(
-    CruvedProtectedMixin,
-    NotificationTemplate,
-):
-    module_code = "ADMIN"
-    object_code = "NOTIFICATIONS"
-
-
-class ProtectedNotificationCategoryAdmin(
-    CruvedProtectedMixin,
-    NotificationCategory,
-):
-    module_code = "ADMIN"
-    object_code = "NOTIFICATIONS"
-
-
-class ProtectedNotificationMethodAdmin(
-    CruvedProtectedMixin,
-    NotificationMethod,
-):
-    module_code = "ADMIN"
-    object_code = "NOTIFICATIONS"
-
-
 ## déclaration de la page d'admin
 admin = Admin(
     template_mode="bootstrap4",
@@ -137,8 +79,8 @@ class ProtectedNotificationMethodAdmin(
 ## ajout des elements
 admin.add_view(
-    ProtectedBibNomenclaturesTypesAdminConfig(
-        BibNomenclaturesTypesAdmin,
+    ProtectedBibNomenclaturesTypesAdmin(
+        BibNomenclaturesTypes,
         db.session,
         name="Type de nomenclatures",
         category="Nomenclatures",
@@ -146,23 +88,14 @@ class ProtectedNotificationMethodAdmin(
 )
 
 admin.add_view(
-    ProtectedTNomenclaturesAdminConfig(
-        TNomenclaturesAdmin,
+    ProtectedTNomenclaturesAdmin(
+        TNomenclatures,
         db.session,
         name="Items de nomenclatures",
         category="Nomenclatures",
     )
 )
 
-admin.add_view(
-    ProtectedBibFieldAdmin(
-        TAdditionalFields,
-        db.session,
-        name="Bibliothèque de champs additionnels",
-        category="Champs additionnels",
-    )
-)
-
 # Ajout de la vue pour la gestion des templates de notifications
 # accès protegé par CruvedProtectedMixin
 admin.add_view(
@@ -192,4 +125,31 @@ class ProtectedNotificationMethodAdmin(
     )
 )
 
+admin.add_view(
+    BibFieldAdmin(
+        TAdditionalFields,
+        db.session,
+        name="Champs additionnels",
+        category="Autres",
+    )
+)
+
+admin.add_view(
+    TMobileAppsAdmin(
+        TMobileApps,
+        db.session,
+        name="Applications mobiles",
+        category="Autres",
+    )
+)
+
+admin.add_view(
+    TModulesAdmin(
+        TModules,
+        db.session,
+        name="Modules",
+        category="Autres",
+    )
+)
+
 flask_admin = admin  # for retro-compatibility, usefull for export module for instance
diff --git a/backend/geonature/core/admin/utils.py b/backend/geonature/core/admin/utils.py
new file mode 100644
index 0000000000..6bc577d4e6
--- /dev/null
+++ b/backend/geonature/core/admin/utils.py
@@ -0,0 +1,55 @@
+from functools import partial
+
+from flask import g
+from werkzeug.exceptions import Unauthorized
+
+from geonature.core.gn_permissions.tools import get_scopes_by_action
+
+
+class CruvedProtectedMixin:
+    def is_accessible(self):
+        if g.current_user is None:
+            raise Unauthorized  # return False leads to Forbidden which is different
+        return self._can_action("R")
+
+    def _can_action(self, action):
+        scope = get_scopes_by_action(
+            g.current_user.id_role,
+            module_code=self.module_code,
+            object_code=getattr(self, "object_code", "ALL"),
+        )[action]
+        return scope == 3
+
+    @property
+    def can_create(self):
+        return self._can_action("C")
+
+    @property
+    def can_edit(self):
+        return self._can_action("U")
+
+    @property
+    def can_delete(self):
+        return self._can_action("D")
+
+    @property
+    def can_export(self):
+        return self._can_action("E")
+
+
+# https://github.com/flask-admin/flask-admin/issues/1807
+# https://stackoverflow.com/questions/54638047/correct-way-to-register-flask-admin-views-with-application-factory
+class ReloadingIterator:
+    def __init__(self, iterator_factory):
+        self.iterator_factory = iterator_factory
+
+    def __iter__(self):
+        return self.iterator_factory()
+
+
+class DynamicOptionsMixin:
+    def get_dynamic_options(self, view):
+        raise NotImplementedError
+
+    def get_options(self, view):
+        return ReloadingIterator(partial(self.get_dynamic_options, view))
diff --git a/backend/geonature/core/auth/routes.py b/backend/geonature/core/auth/routes.py
index cc326add9b..f301130e53 100644
--- a/backend/geonature/core/auth/routes.py
+++ b/backend/geonature/core/auth/routes.py
@@ -51,14 +51,14 @@ def loginCas():
         )
         response = utilsrequests.get(url_validate)
-        user = None
+        data = None
         xml_dict = xmltodict.parse(response.content)
         resp = xml_dict["cas:serviceResponse"]
         if "cas:authenticationSuccess" in resp:
-            user = resp["cas:authenticationSuccess"]["cas:user"]
-        if user:
+            data = resp["cas:authenticationSuccess"]["cas:user"]
+        if data:
             ws_user_url = "{url}/{user}/?verify=false".format(
-                url=config_cas["CAS_USER_WS"]["URL"], user=user
+                url=config_cas["CAS_USER_WS"]["URL"], user=data
             )
             try:
                 response = utilsrequests.get(
@@ -75,7 +75,7 @@
                     "Error with the inpn authentification service", status_code=500
                 )
             info_user = response.json()
-            user = insert_user_and_org(info_user)
+            data = insert_user_and_org(info_user)
             db.session.commit()
 
             # creation de la Response
@@ -90,7 +90,7 @@
                 .one()
                 .id_application
             )
-            token = encode_token(user)
+            token = encode_token(data)
             response.set_cookie("token", token, expires=cookie_exp)
 
             # User cookie
@@ -98,8 +98,8 @@
             if not organism_id:
                 organism_id = Organisme.query.filter_by(nom_organisme="Autre").one().id_organisme
             current_user = {
-                "user_login": user["identifiant"],
-                "id_role": user["id_role"],
+                "user_login": data["identifiant"],
+                "id_role": data["id_role"],
                 "id_organisme": organism_id,
             }
             response.set_cookie("current_user", str(current_user), expires=cookie_exp)
diff --git a/backend/geonature/core/command/create_gn_module.py b/backend/geonature/core/command/create_gn_module.py
index c4f82ae98b..3dacfb45b9 100644
--- a/backend/geonature/core/command/create_gn_module.py
+++ b/backend/geonature/core/command/create_gn_module.py
@@ -4,16 +4,15 @@
 import site
 import importlib
 from pathlib import Path
-import pkg_resources
-from pkg_resources import iter_entry_points
 
 import click
 from click import ClickException
 
 from geonature.utils.env import ROOT_DIR
-from geonature.utils.module import get_dist_from_code, module_db_upgrade
+from geonature.utils.module import iter_modules_dist, get_dist_from_code, module_db_upgrade
 from geonature.core.command.main import main
+import geonature.utils.config
 from geonature.utils.config import config
 from geonature.utils.command import (
     install_frontend_dependencies,
@@ -26,8 +25,8 @@
 @click.option(
     "-x", "--x-arg", multiple=True, help="Additional arguments consumed by custom env.py scripts"
 )
-@click.argument("module_path")
-@click.argument("module_code")
+@click.argument("module_path", type=click.Path(exists=True, file_okay=False, path_type=Path))
+@click.argument("module_code", required=False)
 @click.option("--build", type=bool, required=False, default=True)
 @click.option("--upgrade-db", type=bool, required=False, default=True)
 def install_gn_module(x_arg, module_path, module_code, build, upgrade_db):
@@ -36,19 +35,33 @@ def install_gn_module(x_arg, module_path, module_code, build, upgrade_db):
 
     # refresh list of entry points
     importlib.reload(site)
-    for entry in sys.path:
-        pkg_resources.working_set.add_entry(entry)
+    importlib.reload(geonature.utils.config)
 
-    # load python package
-    module_dist = get_dist_from_code(module_code)
-    if not module_dist:
-        raise ClickException(f"Aucun module ayant pour code {module_code} n’a été trouvé")
+    if module_code:
+        # load python package
+        module_dist = get_dist_from_code(module_code)
+        if not module_dist:
+            raise ClickException(f"Aucun module ayant pour code {module_code} n’a été trouvé")
+    else:
+        for module_dist in iter_modules_dist():
+            module = module_dist.entry_points["code"].module
+            if module not in sys.modules:
+                path = Path(importlib.import_module(module).__file__)
+            else:
+                path = Path(sys.modules[module].__file__)
+            if module_path.resolve() in path.parents:
+                module_code = module_dist.entry_points["code"].load()
+                break
+        else:
+            raise ClickException(
+                f"Impossible de détecter le code du module, essayez de le spécifier."
+            )
 
     # symlink module in exernal module directory
-    module_frontend_path = os.path.realpath(f"{module_path}/frontend")
+    module_frontend_path = (module_path / "frontend").resolve()
     module_symlink = ROOT_DIR / "frontend" / "external_modules" / module_code.lower()
     if os.path.exists(module_symlink):
-        if module_frontend_path != os.path.realpath(os.readlink(module_symlink)):
+        if module_frontend_path != os.readlink(module_symlink):
             click.echo(f"Correction du lien symbolique {module_symlink} → {module_frontend_path}")
             os.unlink(module_symlink)
             os.symlink(module_frontend_path, module_symlink)
@@ -95,15 +108,14 @@
 )
 @click.argument("module_codes", metavar="[MODULE_CODE]...", nargs=-1)
 def upgrade_modules_db(directory, sql, tag, x_arg, module_codes):
-    for module_code_entry in iter_entry_points("gn_module", "code"):
-        module_code = module_code_entry.resolve()
+    for module_dist in iter_modules_dist():
+        module_code = module_dist.entry_points["code"].load()
         if module_codes and module_code not in module_codes:
             continue
         if module_code in config["DISABLED_MODULES"]:
             click.echo(f"Omission du module {module_code} (déactivé)")
             continue
         click.echo(f"Mise-à-jour du module {module_code}…")
-        module_dist = module_code_entry.dist
         if not module_db_upgrade(module_dist, directory, sql, tag, x_arg):
             click.echo(
                 "Le module est déjà déclaré en base. "
diff --git a/backend/geonature/core/command/main.py b/backend/geonature/core/command/main.py
index 6a156a7e3f..dffd7ad452 100644
--- a/backend/geonature/core/command/main.py
+++ b/backend/geonature/core/command/main.py
@@ -5,21 +5,18 @@
 import logging
 from os import environ
 from collections import ChainMap
-from pkg_resources import iter_entry_points
 
 import toml
 import click
 from flask.cli import run_command
 
 from geonature.utils.env import GEONATURE_VERSION
-from geonature.utils.command import (
-    create_frontend_config,
-)
+from geonature.utils.module import iter_modules_dist
 from geonature import create_app
-from geonature.core.gn_meta.mtd.mtd_utils import import_all_dataset_af_and_actors
 from geonature.utils.config import config
 from geonature.utils.config_schema import GnGeneralSchemaConf, GnPySchemaConf
 from geonature.utils.command import (
+    create_frontend_config,
     create_frontend_module_config,
     build_frontend,
 )
@@ -119,29 +116,20 @@ def update_configuration(modules, build):
     create_frontend_config()
     click.secho("OK", fg="green")
     if modules:
-        for module_code_entry in iter_entry_points("gn_module", "code"):
-            module_code = module_code_entry.resolve()
+        for dist in iter_modules_dist():
+            module_code = dist.entry_points["code"].load()
             click.echo(f"  Module {module_code} … ", nl=False)
             if module_code in config["DISABLED_MODULES"]:
                 click.secho("désactivé, ignoré", fg="white")
                 continue
-            click.secho("OK", fg="green")
             create_frontend_module_config(module_code)
+            click.secho("OK", fg="green")
     if build:
         click.echo("Rebuild du frontend …")
         build_frontend()
         click.secho("Rebuild du frontend terminé.", fg="green")
 
 
-@main.command()
-@click.argument("table_name")
-def import_jdd_from_mtd(table_name):
-    """
-    Import les JDD et CA (et acters associé) à partir d'une table (ou vue) listant les UUID des JDD dans MTD
-    """
-    import_all_dataset_af_and_actors(table_name)
-
-
 @main.command()
 def default_config():
     """
@@ -158,3 +146,22 @@
     frontend_defaults = GnGeneralSchemaConf().load({}, partial=required_fields)
     defaults = ChainMap(backend_defaults, frontend_defaults)
     print(toml.dumps(defaults))
+
+
+@click.argument("key", type=str, required=False)
+@main.command()
+def get_config(key=None):
+    """
+    Afficher l’ensemble des paramètres
+    """
+    printed_config = config.copy()
+    if key:
+        try:
+            printed_config = printed_config[key]
+        except KeyError:
+            click.secho(f"The key {key} does not exist in config", fg="red")
+            return
+    if type(printed_config) is dict:
+        print(toml.dumps(printed_config))
+    else:
+        print(printed_config)
diff --git a/backend/geonature/core/errors.py b/backend/geonature/core/errors.py
index 7e9bc96e2f..a3ba71abe3 100644
--- a/backend/geonature/core/errors.py
+++ b/backend/geonature/core/errors.py
@@ -1,9 +1,12 @@
+from pprint import pformat
 from urllib.parse import urlparse
 import sys
 
 from flask import current_app, request, json, redirect
-from werkzeug.exceptions import Unauthorized, InternalServerError, HTTPException
+from werkzeug.exceptions import Unauthorized, InternalServerError, HTTPException, BadRequest
 from werkzeug.urls import url_encode
 
+from marshmallow.exceptions import ValidationError
+
 
 # Unauthorized means disconnected
 # (logged but not allowed to perform an action = Forbidden)
@@ -33,6 +36,13 @@ def handle_unauthenticated_request(e):
         return redirect(f"{base_url}{login_path}?{query_string}")
 
 
+@current_app.errorhandler(ValidationError)
+def handle_validation_error(e):
+    return handle_http_exception(
+        BadRequest(description=pformat(e.messages)).with_traceback(sys.exc_info()[2])
+    )
+
+
 @current_app.errorhandler(HTTPException)
 def handle_http_exception(e):
     response = e.get_response()
diff --git a/backend/geonature/core/gn_commons/admin.py b/backend/geonature/core/gn_commons/admin.py
index 1295b02e17..3457d22b65 100644
--- a/backend/geonature/core/gn_commons/admin.py
+++ b/backend/geonature/core/gn_commons/admin.py
@@ -1,12 +1,41 @@
-from flask import current_app
+import logging
 
+from flask import current_app, flash
 from flask_admin.contrib.sqla import ModelView
+from flask_admin.form import BaseForm
+from wtforms import validators, Form
 
+from geonature.core.admin.utils import CruvedProtectedMixin
 from geonature.core.gn_commons.models import TModules
-from geonature.core.gn_permissions.models import TObjects
+from geonature.core.gn_permissions.models import PermObject
+from geonature.core.gn_commons.schemas import TAdditionalFieldsSchema
 from geonature.utils.env import DB
 
 
-class BibFieldAdmin(ModelView):
+from marshmallow import ValidationError
+
+log = logging.getLogger()
+
+
+class TAdditionalFieldsForm(BaseForm):
+    def validate(self, extra_validators=None):
+        try:
+            TAdditionalFieldsSchema().load(self.data)
+        except ValidationError as e:
+            log.exception("additional field validation error")
+            flash("The form has errors", "error")
+            self.field_values.errors = (
+                f"Value input must contain a list of dict with value/label key for {self.data['type_widget']} widget ",
+            )
+            return False
+        return super().validate(extra_validators)
+
+
+class BibFieldAdmin(CruvedProtectedMixin, ModelView):
+    module_code = "ADMIN"
+    object_code = "ADDITIONAL_FIELDS"
+
+    form_base_class = TAdditionalFieldsForm
     form_columns = (
         "field_name",
         "field_label",
@@ -57,8 +86,8 @@
             )
         },
         "objects": {
-            "query_factory": lambda: DB.session.query(TObjects).filter(
-                TObjects.code_object.in_(
+            "query_factory": lambda: DB.session.query(PermObject).filter(
+                PermObject.code_object.in_(
                     current_app.config["ADDITIONAL_FIELDS"]["IMPLEMENTED_OBJECTS"]
                 )
             )
@@ -68,11 +97,91 @@
         "bib_nomenclature_type": "Si Type widget = Nomenclature",
         "field_label": "Label du champ en interface",
         "field_name": "Nom du champ en base de donnée",
-        "field_values": "Obligatoire si widget = select/radio/bool_radio (Format JSON : tableau de valeurs ou tableau clé/valeur. Utilisez des doubles quotes pour les valeurs et les clés)",
-        "default_value": "La valeur par défaut doit être une des valeurs du champs 'Valeurs' ci dessus",
+        "field_values": """Obligatoire si widget = select/multiselect/checkbox,radio (Format JSON : tableau de 'value/label'. Utilisez des doubles quotes pour les valeurs et les clés).
+        Exemple [{"label": "trois", "value": 3}, {"label": "quatre", "value": 4}]""",
+        "default_value": "La valeur par défaut doit être une des valeurs du champs 'Valeurs' ci-dessus. Pour les valeurs textuelles, il n'est pas nécessaire de remettre la valeur entre guillement",
         "id_list": "Identifiant en BDD de la liste (pour Type widget = taxonomy/observers)",
         "field_order": "Numéro d'ordonnancement du champs (si plusieurs champs pour le même module/objet/JDD)",
         "modules": "Module(s) auquel le champs est rattaché. *Obligatoire",
         "objects": "Objet(s) auquel le champs est rattaché. *Obligatoire",
         "datasets": "Jeu(x) de donnés auquel le champs est rattaché",
     }
+
+
+class TMobileAppsAdmin(CruvedProtectedMixin, ModelView):
+    module_code = "ADMIN"
+    object_code = "MOBILE_APPS"
+
+    column_list = (
+        "app_code",
+        "relative_path_apk",
+        "url_apk",
+        "package",
+        "version_code",
+    )
+    column_labels = {
+        "app_code": "Code application",
+        "relative_path_apk": "Chemin relatif de l'APK",
+        "url_apk": "URL externe de l'APK",
+        "package": "Nom du paquet",
+        "version_code": "Code de version",
+    }
+    form_columns = ("app_code", "relative_path_apk", "url_apk", "package", "version_code")
+    column_exclude_list = "id_mobile_app"
+
+
+class TModulesAdmin(CruvedProtectedMixin, ModelView):
+    module_code = "ADMIN"
+    object_code = "MODULES"
+
+    can_view_details = True
+    action_disallowed_list = ["delete"]
+    can_create = False
+    can_delete = False
+
+    column_searchable_list = (
+        "module_code",
+        "module_label",
+    )
+    column_default_sort = [
+        ("module_order", False),
+        ("id_module", False),
+    ]
+    column_sortable_list = (
+        "module_order",
+        "module_code",
+        "module_label",
+    )
+
+    column_list = (
+        "module_code",
+        "module_label",
+        "module_picto",
+        "module_order",
+    )
+    column_details_list = (
+        "module_code",
+        "module_label",
+        "module_desc",
+        "module_comment",
+        "module_picto",
+        "module_doc_url",
+        "module_order",
+    )
+    form_columns = (
+        "module_label",
+        "module_desc",
+        "module_comment",
+        "module_picto",
+        "module_doc_url",
+        "module_order",
+    )
+    column_labels = {
+        "module_code": "Code",
+        "module_label": "Label",
+        "module_desc": "Description",
+        "module_comment": "Commentaire",
+        "module_picto": "Pictogramme",
+        "module_doc_url": "URL documentation",
+        "module_order": "Ordre",
+    }
diff --git a/backend/geonature/core/gn_commons/file_manager.py b/backend/geonature/core/gn_commons/file_manager.py
deleted file mode 100644
index 4188bb69d5..0000000000
--- a/backend/geonature/core/gn_commons/file_manager.py
+++ /dev/null
@@ -1,56 +0,0 @@
-import os
-import pathlib
-import re
-import unicodedata
-
-from shutil import rmtree
-from werkzeug.utils import secure_filename
-from flask import current_app
-
-
-def remove_file(filepath):
-    try:
-        os.remove(os.path.join(current_app.config["BASE_DIR"], filepath))
-    except FileNotFoundError:
-        pass
-    except Exception as e:
-        raise e
-
-
-def rename_file(old_path, new_path):
-    os.rename(
-        os.path.join(current_app.config["BASE_DIR"], old_path),
-        os.path.join(current_app.config["BASE_DIR"], new_path),
-    )
-    return new_path
-
-
-def upload_file(file, file_folder, file_name):
-    ext = file.filename.rsplit(".", 1)[1]
-
-    filedir = os.path.join(current_app.config["UPLOAD_FOLDER"], file_folder)
-
-    pathlib.Path(os.path.join(current_app.config["BASE_DIR"], filedir)).mkdir(
-        parents=True, exist_ok=True
-    )
-
-    filepath = os.path.join(
-        filedir,
-        "{file_name}.{ext}".format(
-            file_name=removeDisallowedFilenameChars(file_name.rsplit(".", 1)[0]),
-            ext=ext,
-        ),
-    )
-    try:
-        file.save(os.path.join(current_app.config["BASE_DIR"], filepath))
-    except FileNotFoundError as e:
-        raise e
-    return filepath
-
-
-def removeDisallowedFilenameChars(uncleanString):
-    cleanedString = secure_filename(uncleanString)
-    cleanedString = unicodedata.normalize("NFKD", uncleanString)
-    cleanedString = re.sub("[ ]+", "_", cleanedString)
-    cleanedString = re.sub("[^0-9a-zA-Z_-]", "", cleanedString)
-    return cleanedString
diff --git a/backend/geonature/core/gn_commons/medias/routes.py b/backend/geonature/core/gn_commons/medias/routes.py
index 78e1056ed8..0f0f6762eb 100644
--- a/backend/geonature/core/gn_commons/medias/routes.py
+++ b/backend/geonature/core/gn_commons/medias/routes.py
@@ -88,13 +88,7 @@ def insert_or_update_media(id_media=None):
     else:
         data = request.get_json(silent=True)
 
-    try:
-        m = TMediaRepository(data=data, file=file, id_media=id_media).create_or_update_media()
-
-    except GeoNatureError as e:
-        return str(e), 400
-
-    TMediumRepository.sync_medias()
+    m = TMediaRepository(data=data, file=file, id_media=id_media).create_or_update_media()
     return m.as_dict()
 
 
@@ -110,8 +104,6 @@ def delete_media(id_media):
 
     TMediaRepository(id_media=id_media).delete()
 
-    TMediumRepository.sync_medias()
-
     return {"resp": "media {} deleted".format(id_media)}
 
 
@@ -124,11 +116,8 @@ def get_media_thumb(id_media, size):
     media_repo = TMediaRepository(id_media=id_media)
     m = media_repo.media
     if not m:
-        return {"msg": "Media introuvable"}, 404
+        raise NotFound("Media introuvable")
 
-    try:
-        url_thumb = media_repo.get_thumbnail_url(size)
-    except GeoNatureError as e:
-        return {"msg": str(e)}, 500
+    url_thumb = media_repo.get_thumbnail_url(size)
 
     return redirect(url_thumb)
diff --git a/backend/geonature/core/gn_commons/models/additional_fields.py b/backend/geonature/core/gn_commons/models/additional_fields.py
index 4ef1907abf..d449da6a24 100644
--- a/backend/geonature/core/gn_commons/models/additional_fields.py
+++ b/backend/geonature/core/gn_commons/models/additional_fields.py
@@ -10,7 +10,7 @@
 from .base import cor_field_module, cor_field_object, cor_field_dataset
 from geonature.core.gn_meta.models import TDatasets
-from geonature.core.gn_permissions.models import TObjects
+from geonature.core.gn_permissions.models import PermObject
 
 
 @serializable
@@ -43,16 +43,14 @@ class TAdditionalFields(DB.Model):
     )
     additional_attributes = DB.Column(JSONB)
     multiselect = DB.Column(DB.Boolean)
-    key_label = DB.Column(DB.String)
-    key_value = DB.Column(DB.String)
     api = DB.Column(DB.String)
     default_value = DB.Column(DB.String)
     modules = DB.relationship(
         "TModules",
         secondary=cor_field_module,
     )
-    objects = DB.relationship("TObjects", secondary=cor_field_object)
-    datasets = DB.relationship("TDatasets", secondary=cor_field_dataset)
+    objects = DB.relationship(PermObject, secondary=cor_field_object)
+    datasets = DB.relationship(TDatasets, secondary=cor_field_dataset)
 
     def __str__(self):
         return f"{self.field_label} ({self.description})"
diff --git a/backend/geonature/core/gn_commons/models/base.py b/backend/geonature/core/gn_commons/models/base.py
index 6c90c929cd..b55432459f 100644
--- a/backend/geonature/core/gn_commons/models/base.py
+++ b/backend/geonature/core/gn_commons/models/base.py
@@ -2,6 +2,7 @@
 Modèles du schéma gn_commons
 """
 import os
+from pathlib import Path
 
 from flask import current_app
 from sqlalchemy import ForeignKey
@@ -16,7 +17,6 @@
 from utils_flask_sqla_geo.serializers import geoserializable
 
 from geonature.utils.env import DB
-from geonature.core.gn_commons.file_manager import rename_file
 
 
 @serializable
@@ -77,9 +77,10 @@ class TModules(DB.Model):
     __tablename__ = "t_modules"
     __table_args__ = {"schema": "gn_commons"}
 
-    type = DB.Column(DB.Unicode)
+    type = DB.Column(DB.Unicode, nullable=False, server_default="base")
     __mapper_args__ = {
         "polymorphic_on": "type",
+        "polymorphic_identity": "base",
     }
 
     id_module = DB.Column(DB.Integer, primary_key=True)
@@ -101,7 +102,7 @@ class TModules(DB.Model):
     meta_update_date = DB.Column(DB.DateTime)
 
     objects = DB.relationship(
-        "TObjects", secondary=lambda: _resolve_import_cor_object_module(), backref="modules"
+        "PermObject", secondary=lambda: _resolve_import_cor_object_module(), backref="modules"
    )
 
    # relationship datasets add via backref
@@ -109,7 +110,7 @@ def __str__(self):
         return self.module_label.capitalize()
 
 
-@serializable
+@serializable(exclude=["base_dir"])
 class TMedias(DB.Model):
     __tablename__ = "t_medias"
     __table_args__ = {"schema": "gn_commons"}
@@ -139,39 +140,38 @@ class TMedias(DB.Model):
     meta_create_date = DB.Column(DB.DateTime)
     meta_update_date = DB.Column(DB.DateTime)
 
+    @staticmethod
+    def base_dir():
+        return Path(current_app.config["MEDIA_FOLDER"]) / "attachments"
+
     def __before_commit_delete__(self):
         # déclenché sur un DELETE : on supprime le fichier
-        if self.media_path and os.path.exists(
-            os.path.join(current_app.config["BASE_DIR"] + "/" + self.media_path)
-        ):
+        if self.media_path and (self.base_dir() / self.media_path).exists():
             # delete file
             self.remove_file()
             # delete thumbnail
             self.remove_thumbnails()
 
-    def remove_file(self):
+    def remove_file(self, move=True):
         if not self.media_path:
             return
-        initial_path = self.media_path
-        (inv_file_name, inv_file_path) = initial_path[::-1].split("/", 1)
-        file_name = inv_file_name[::-1]
-        file_path = inv_file_path[::-1]
-
-        try:
-            self.media_path = rename_file(
-                self.media_path, "{}/deleted_{}".format(file_path, file_name)
-            )
-        except FileNotFoundError:
-            raise Exception("Unable to delete file {}".format(initial_path))
+        path = self.base_dir() / self.media_path
+        if move:
+            new_path = path.parent / f"deleted_{path.name}"
+            path.rename(new_path)
+            self.media_path = str(new_path.relative_to(self.base_dir()))
+        else:
+            path.unlink()
 
     def remove_thumbnails(self):
         # delete thumbnail test sur nom des fichiers avec id dans le dossier thumbnail
         dir_thumbnail = os.path.join(
-            current_app.config["BASE_DIR"],
-            current_app.config["UPLOAD_FOLDER"],
+            str(self.base_dir()),
             "thumbnails",
             str(self.id_table_location),
         )
+        if not os.path.isdir(dir_thumbnail):
+            return
         for f in os.listdir(dir_thumbnail):
             if f.split("_")[0] == str(self.id_media):
                 abs_path = os.path.join(dir_thumbnail, f)
diff --git a/backend/geonature/core/gn_commons/repositories.py b/backend/geonature/core/gn_commons/repositories.py
index 7694925b5b..bef01bdd68 100644
--- a/backend/geonature/core/gn_commons/repositories.py
+++ b/backend/geonature/core/gn_commons/repositories.py
@@ -1,19 +1,19 @@
 import os
 import datetime
 import requests
-import pathlib
+from pathlib import Path
 
 from PIL import Image
 from io import BytesIO
 from flask import current_app, url_for
+from werkzeug.utils import secure_filename
 from sqlalchemy import and_
 from sqlalchemy.exc import IntegrityError
 from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
 from pypnnomenclature.models import TNomenclatures
 
-from geonature.utils.env import DB
 from geonature.core.gn_commons.models import TMedias, BibTablesLocation
-from geonature.core.gn_commons.file_manager import upload_file, remove_file, rename_file
+from geonature.utils.env import DB
 from geonature.utils.errors import GeoNatureError
@@ -54,10 +54,7 @@ def create_or_update_media(self):
         - Stockage du fichier
         """
         if self.new:
-            try:
-                self._persist_media_db()
-            except Exception as e:
-                raise e
+            self._persist_media_db()
 
         # Si le média à un fichier associé
         if self.file:
@@ -80,7 +77,7 @@ def create_or_update_media(self):
             and (self.data["isFile"] is not True)
             and (self.media.media_path is not None)
         ):
-            remove_file(self.media.media_path)
+            self.media.remove(move=False)
             self.media.remove_thumbnails()
 
         # Si le média avait une url
@@ -125,7 +122,7 @@ def _persist_media_db(self):
             raise Exception("Errors {}".format(exp.args))
 
     def absolute_file_path(self, thumbnail_height=None):
-        return os.path.join(current_app.config["BASE_DIR"], self.file_path(thumbnail_height))
+        return str(TMedias.base_dir() / self.file_path(thumbnail_height))
 
     def test_video_link(self):
         media_type = self.media_type()
@@ -161,7 +158,6 @@ def test_header_content_type(self, content_type):
         return True
 
     def test_url(self):
-
         try:
             if not self.data["media_url"]:
                 return
@@ -196,14 +192,12 @@ def file_path(self, thumbnail_height=None):
             file_path = self.media.media_path
         else:
             file_path = os.path.join(
-                current_app.config["UPLOAD_FOLDER"],
                 str(self.media.id_table_location),
                 "{}.jpg".format(self.media.id_media),
             )
 
         if thumbnail_height:
             file_path = os.path.join(
-                current_app.config["UPLOAD_FOLDER"],
                 "thumbnails",
                 str(self.media.id_table_location),
                 "{}_thumbnail_{}.jpg".format(self.media.id_media, thumbnail_height),
@@ -224,15 +218,12 @@ def upload_file(self):
             self.media.remove_thumbnails()
 
         # @TODO récupérer les exceptions
-        filepath = upload_file(
-            self.file,
-            str(self.media.id_table_location),
-            "{id_media}_{file_name}".format(
-                id_media=self.media.id_media, file_name=self.file.filename
-            ),
-        )
+        filename = "{}_{}".format(self.media.id_media, secure_filename(self.file.filename))
+        filedir = TMedias.base_dir() / str(self.media.id_table_location)
+        filedir.mkdir(parents=True, exist_ok=True)
+        self.file.save(str(filedir / filename))
 
-        return filepath
+        return os.path.join(str(self.media.id_table_location), filename)
 
     def is_img(self):
         return self.media_type() == "Photo"
@@ -257,24 +248,6 @@ def get_image(self):
 
         return image
 
-    def get_image_with_exp(self):
-        """
-        Fonction qui tente de récupérer une image
-        et qui lance des exceptions en cas d'erreur
-        """
-
-        try:
-            return self.get_image()
-        except Exception:
-            if self.media.media_path:
-                raise GeoNatureError(
-                    "Le fichier fournit ne contient pas une image valide"
-                ) from Exception
-            else:
-                raise GeoNatureError(
-                    "L'URL renseignée ne contient pas une image valide"
-                ) from Exception
-
     def has_thumbnails(self):
         """
         Test si la liste des thumbnails
@@ -302,20 +275,20 @@ def create_thumbnails(self):
         if self.has_thumbnails():
             return
 
-        image = self.get_image_with_exp()
+        image = self.get_image()
 
         for thumbnail_height in self.thumbnail_sizes:
             self.create_thumbnail(thumbnail_height, image)
 
     def create_thumbnail(self, size, image=None):
         if not image:
-            image = self.get_image_with_exp()
+            image = self.get_image()
 
         image_thumb = image.copy()
         width = size / image.size[1] * image.size[0]
         image_thumb.thumbnail((width, size))
 
         thumb_path = self.absolute_file_path(size)
-        pathlib.Path("/".join(thumb_path.split("/")[:-1])).mkdir(parents=True, exist_ok=True)
+        Path(thumb_path).parent.mkdir(parents=True, exist_ok=True)
 
         if image.mode in ("RGBA", "P"):
             image_thumb = image_thumb.convert("RGB")
@@ -335,11 +308,9 @@ def get_thumbnail_url(self, size):
         thumb_path = self.absolute_file_path(size)
 
         # Get relative path
-        relative_path = os.path.relpath(
-            thumb_path, os.path.join(current_app.config["BASE_DIR"], "static")
-        )
+        relative_path = os.path.relpath(thumb_path, current_app.config["MEDIA_FOLDER"])
         # Get URL
-        thumb_url = url_for("static", filename=relative_path)
+        thumb_url = url_for("media", filename=relative_path)
         return thumb_url
 
     def delete(self):
@@ -350,20 +321,10 @@
         initial_path = self.media.media_path
 
         if self.media.media_path and not current_app.config["SQLALCHEMY_TRACK_MODIFICATIONS"]:
+            self.media.__before_commit_delete__()
 
-            try:
-                self.media.__before_commit_delete__()
-
-            except FileNotFoundError:
-                raise Exception("Unable to delete file")
-
-        # Suppression du média dans la base
-        try:
-            DB.session.delete(self.media)
-            DB.session.commit()
-        except Exception:
-            if initial_path:
-                new_path = rename_file(self.media.media_path, initial_path)
+        DB.session.delete(self.media)
+        DB.session.commit()
 
     def _load_from_id(self, id_media):
         """
@@ -420,15 +381,13 @@ def sync_medias():
 
     # liste des id des medias fichiers
     liste_fichiers = []
-    search_path = pathlib.Path(
-        current_app.config["BASE_DIR"], current_app.config["UPLOAD_FOLDER"]
-    )
-    for (repertoire, sous_repertoires, fichiers) in os.walk(search_path):
+    search_path = TMedias.base_dir()
+    for repertoire, sous_repertoires, fichiers in os.walk(search_path):
         for f in fichiers:
             id_media = f.split("_")[0]
             try:
                 id_media = int(id_media)
-                f_data = {"id_media": id_media, "path": pathlib.Path(repertoire, f)}
+                f_data = {"id_media": id_media, "path": Path(repertoire, f)}
                 liste_fichiers.append(f_data)
             except ValueError:
                 pass
@@ -454,9 +413,8 @@ def sync_medias():
             if "thumbnail" in str(f_data["path"]):
                 os.remove(f_data["path"])
             else:
-                deleted_paths = str(f_data["path"]).split("/")
-                deleted_paths[-1] = "deleted_" + deleted_paths[-1]
-                rename_file(f_data["path"], "/".join(deleted_paths))
+                p = Path(f_data["path"])
+                p.rename(p.parent / f"deleted_{p.name}")
 
 
 def get_table_location_id(schema_name, table_name):
diff --git a/backend/geonature/core/gn_commons/routes.py b/backend/geonature/core/gn_commons/routes.py
index 906a1d6e29..6d8d7aff13 100644
--- a/backend/geonature/core/gn_commons/routes.py
+++ b/backend/geonature/core/gn_commons/routes.py
@@ -1,7 +1,8 @@
 import json
 from operator import or_
+from pathlib import Path
 
-from flask import Blueprint, request, current_app, g
+from flask import Blueprint, request, current_app, g, url_for
 from flask.json import jsonify
 from werkzeug.exceptions import Forbidden, Conflict
 import requests
@@ -19,11 +20,13 @@
 )
 from geonature.core.gn_commons.repositories import TMediaRepository
 from geonature.core.gn_commons.repositories import get_table_location_id
-from geonature.core.gn_permissions.models import TObjects
 from geonature.utils.env
import DB, db, BACKEND_DIR
+from geonature.utils.config import config_frontend, config
 from geonature.core.gn_permissions import decorators as permissions
 from geonature.core.gn_permissions.decorators import login_required
-from geonature.core.gn_permissions.tools import get_scopes_by_action
+from geonature.core.gn_permissions.tools import get_scope
+import geonature.core.gn_commons.tasks  # noqa: F401
+
 from shapely.geometry import asShape
 from geoalchemy2.shape import from_shape
 from geonature.utils.errors import (
@@ -38,6 +41,14 @@
 from .medias.routes import *
 
 
+@routes.route("/config", methods=["GET"])
+def config_route():
+    """
+    Returns geonature configuration
+    """
+    return config_frontend
+
+
 @routes.route("/modules", methods=["GET"])
 @login_required
 def list_modules():
@@ -48,33 +59,49 @@ def list_modules():
     """
     params = request.args
     q = TModules.query.options(joinedload(TModules.objects))
+    exclude = current_app.config["DISABLED_MODULES"]
     if "exclude" in params:
-        q = q.filter(TModules.module_code.notin_(params.getlist("exclude")))
+        exclude.extend(params.getlist("exclude"))
+    q = q.filter(TModules.module_code.notin_(exclude))
     q = q.order_by(TModules.module_order.asc()).order_by(TModules.module_label.asc())
     modules = q.all()
     allowed_modules = []
     for module in modules:
-        if module.module_code in current_app.config["DISABLED_MODULES"]:
-            continue
-        cruved = get_scopes_by_action(module_code=module.module_code)
-        if cruved["R"] > 0:
-            module_dict = module.as_dict(fields=["objects"])
-            module_dict["cruved"] = cruved
-            if module.active_frontend:
-                module_dict["module_url"] = "{}/#/{}".format(
-                    current_app.config["URL_APPLICATION"], module.module_path
-                )
-            else:
-                module_dict["module_url"] = module.module_external_url
-            module_dict["module_objects"] = {}
-            # get cruved for each object
-            for obj_dict in module_dict["objects"]:
-                obj_code = obj_dict["code_object"]
-                obj_dict["cruved"] = get_scopes_by_action(
+        module_allowed = False
+        # HACK: we need the GeoNature module on the frontend for the documentation URL
+        if module.module_code == "GEONATURE":
+            module_allowed = True
+        module_dict = module.as_dict(fields=["objects"])
+        # TODO : use has_any_permissions instead - must refactor the front
+        module_dict["cruved"] = {
+            action: get_scope(action, module_code=module.module_code, bypass_warning=True)
+            for action in "CRUVED"
+        }
+        if any(module_dict["cruved"].values()):
+            module_allowed = True
+        if module.active_frontend:
+            module_dict["module_url"] = "{}/#/{}".format(
+                current_app.config["URL_APPLICATION"], module.module_path
+            )
+        else:
+            module_dict["module_url"] = module.module_external_url
+        module_dict["module_objects"] = {}
+        # get cruved for each object
+        for obj_dict in module_dict["objects"]:
+            obj_code = obj_dict["code_object"]
+            obj_dict["cruved"] = {
+                action: get_scope(
+                    action, module_code=module.module_code, object_code=obj_code,
+                    bypass_warning=True,
                 )
-            module_dict["module_objects"][obj_code] = obj_dict
+                for action in "CRUVED"
+            }
+            if any(obj_dict["cruved"].values()):
+                module_allowed = True
+            module_dict["module_objects"][obj_code] = obj_dict
+        if module_allowed:
             allowed_modules.append(module_dict)
     return jsonify(allowed_modules)
@@ -131,7 +158,6 @@ def get_additional_fields():
             q = q.filter(TAdditionalFields.datasets.any(id_dataset=params["id_dataset"]))
     if "module_code" in params:
         if len(params["module_code"].split(",")) > 1:
-
             ors = [
                 TAdditionalFields.modules.any(module_code=module_code)
                 for module_code in params["module_code"].split(",")
@@ -181,16 +207,13 @@ 
def get_t_mobile_apps(): app_dict["settings"] = {} #  if local if app.relative_path_apk: - app_dict["url_apk"] = "{}/{}".format( - current_app.config["API_ENDPOINT"], app.relative_path_apk + relative_apk_path = Path("mobile", app.relative_path_apk) + app_dict["url_apk"] = url_for("media", filename=str(relative_apk_path), _external=True) + relative_settings_path = relative_apk_path.parent / "settings.json" + app_dict["url_settings"] = url_for( + "media", filename=relative_settings_path, _external=True ) - relative_path_dir = app.relative_path_apk.rsplit("/", 1)[0] - app_dict["url_settings"] = "{}/{}/{}".format( - current_app.config["API_ENDPOINT"], - relative_path_dir, - "settings.json", - ) - settings_file = BACKEND_DIR / relative_path_dir / "settings.json" + settings_file = Path(current_app.config["MEDIA_FOLDER"]) / relative_settings_path with settings_file.open() as f: app_dict["settings"] = json.load(f) mobile_apps.append(app_dict) @@ -206,7 +229,6 @@ def get_t_mobile_apps(): @json_resp # schema_dot_table gn_commons.t_modules def api_get_id_table_location(schema_dot_table): - schema_name = schema_dot_table.split(".")[0] table_name = schema_dot_table.split(".")[1] return get_table_location_id(schema_name, table_name) @@ -216,7 +238,7 @@ def api_get_id_table_location(schema_dot_table): # Gestion des lieux (places) # ############################## @routes.route("/places", methods=["GET"]) -@permissions.check_cruved_scope("R") +@login_required def list_places(): places = TPlaces.query.filter_by(id_role=g.current_user.id_role).all() return jsonify([p.as_geofeature() for p in places]) @@ -224,7 +246,7 @@ def list_places(): @routes.route("/place", methods=["POST"]) # XXX best practices recommend plural nouns @routes.route("/places", methods=["POST"]) -@permissions.check_cruved_scope("C") +@login_required def add_place(): data = request.get_json() # FIXME check data validity! 
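For context on the mobile-app hunk above: a minimal sketch of how the new "media" endpoint, rooted at MEDIA_FOLDER, yields the apk and settings URLs. The relative apk path is an illustrative value, not taken from this changeset, and the snippet assumes it runs inside a Flask request context:

    from pathlib import Path
    from flask import url_for

    # hypothetical value of relative_path_apk, as stored in gn_commons.t_mobile_apps
    relative_path_apk = "occtax/occtax.apk"

    relative_apk_path = Path("mobile", relative_path_apk)
    # the "media" endpoint serves files located under MEDIA_FOLDER
    url_apk = url_for("media", filename=str(relative_apk_path), _external=True)
    # settings.json is expected to sit next to the apk
    url_settings = url_for(
        "media", filename=str(relative_apk_path.parent / "settings.json"), _external=True
    )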
@@ -250,7 +272,7 @@ def add_place(): "/place/", methods=["DELETE"] ) # XXX best practices recommend plural nouns @routes.route("/places/", methods=["DELETE"]) -@permissions.check_cruved_scope("D") +@login_required def delete_place(id_place): place = TPlaces.query.get_or_404(id_place) if g.current_user.id_role != place.id_role: diff --git a/backend/geonature/core/gn_commons/schemas.py b/backend/geonature/core/gn_commons/schemas.py index 849062991f..58b1ec1f40 100644 --- a/backend/geonature/core/gn_commons/schemas.py +++ b/backend/geonature/core/gn_commons/schemas.py @@ -1,9 +1,15 @@ -from marshmallow import pre_load, fields, EXCLUDE +from marshmallow import Schema, pre_load, fields, EXCLUDE from pypnnomenclature.schemas import NomenclatureSchema from pypnusershub.schemas import UserSchema from geonature.utils.env import MA -from geonature.core.gn_commons.models import TModules, TMedias, TValidations +from geonature.core.gn_commons.models import ( + TModules, + TMedias, + TValidations, + TAdditionalFields, + BibWidgets, +) class ModuleSchema(MA.SQLAlchemyAutoSchema): @@ -14,7 +20,6 @@ class Meta: "module_picto", "module_desc", "module_group", - "module_path", "module_external_url", "module_target", "module_comment", @@ -49,3 +54,25 @@ class Meta: include_fk = True validation_label = fields.Nested(NomenclatureSchema, dump_only=True) validator_role = MA.Nested(UserSchema, dump_only=True) + + +class BibWidgetSchema(MA.SQLAlchemyAutoSchema): + class Meta: + model = BibWidgets + load_instance = True + + +class LabelValueDict(Schema): + label = fields.Str() + value = fields.Raw() + + +class TAdditionalFieldsSchema(MA.SQLAlchemyAutoSchema): + class Meta: + model = TAdditionalFields + load_instance = True + + def load(self, data, *, many=None, **kwargs): + if data["type_widget"].widget_name in ("select", "checkbox", "radio", "multiselect"): + LabelValueDict(many=True).load(data["field_values"]) + return super().load(data, many=many, unknown=EXCLUDE) diff --git a/backend/geonature/core/gn_commons/tasks.py b/backend/geonature/core/gn_commons/tasks.py new file mode 100644 index 0000000000..09544a0d98 --- /dev/null +++ b/backend/geonature/core/gn_commons/tasks.py @@ -0,0 +1,33 @@ +from celery.schedules import crontab +from celery.utils.log import get_task_logger + +from geonature.core.gn_commons.repositories import TMediumRepository +from geonature.utils.celery import celery_app +from geonature.utils.config import config + +logger = get_task_logger(__name__) + + +@celery_app.on_after_finalize.connect +def setup_periodic_tasks(sender, **kwargs): + ct = config["MEDIA_CLEAN_CRONTAB"] + if ct: + minute, hour, day_of_month, month_of_year, day_of_week = ct.split(" ") + sender.add_periodic_task( + crontab( + minute=minute, + hour=hour, + day_of_week=day_of_week, + day_of_month=day_of_month, + month_of_year=month_of_year, + ), + clean_attachments.s(), + name="clean medias", + ) + + +@celery_app.task(bind=True) +def clean_attachments(self): + logger.info("Cleaning medias...") + TMediumRepository.sync_medias() + logger.info("Medias cleaned") diff --git a/backend/geonature/core/gn_commons/validation/routes.py b/backend/geonature/core/gn_commons/validation/routes.py index 6c975dc3a9..3a1ad3bcc9 100644 --- a/backend/geonature/core/gn_commons/validation/routes.py +++ b/backend/geonature/core/gn_commons/validation/routes.py @@ -18,7 +18,7 @@ @routes.route("/history/", methods=["GET"]) -@permissions.check_cruved_scope("R") +@permissions.check_cruved_scope("R", module_code="SYNTHESE") @json_resp def 
get_hist(uuid_attached_row): # Test if uuid_attached_row is uuid diff --git a/backend/geonature/core/gn_meta/models.py b/backend/geonature/core/gn_meta/models.py index 1fda1d57cb..ad9b11efb6 100644 --- a/backend/geonature/core/gn_meta/models.py +++ b/backend/geonature/core/gn_meta/models.py @@ -1,15 +1,11 @@ import datetime from uuid import UUID +from packaging import version from flask import g -from flask_sqlalchemy import BaseQuery -from geonature.core.gn_permissions.tools import ( - cruved_scope_for_user_in_module, - get_scopes_by_action, -) -from geonature.utils.errors import GeonatureApiError +import flask_sqlalchemy import sqlalchemy as sa -from sqlalchemy import ForeignKey, or_ +from sqlalchemy import ForeignKey, or_, and_ from sqlalchemy.sql import select, func, exists from sqlalchemy.orm import relationship, exc, synonym from sqlalchemy.dialects.postgresql import UUID as UUIDType @@ -17,38 +13,54 @@ from sqlalchemy.schema import FetchedValue from utils_flask_sqla.generic import testDataType from werkzeug.exceptions import BadRequest, NotFound -from werkzeug.datastructures import MultiDict +import marshmallow as ma + +if version.parse(flask_sqlalchemy.__version__) >= version.parse("3"): + from flask_sqlalchemy.query import Query +else: + from flask_sqlalchemy import BaseQuery as Query from pypnnomenclature.models import TNomenclatures from pypnusershub.db.models import User, Organisme from utils_flask_sqla.serializers import serializable +from geonature.utils.errors import GeonatureApiError from geonature.utils.env import DB, db +from geonature.core.gn_permissions.tools import get_scopes_by_action from geonature.core.gn_commons.models import cor_field_dataset, cor_module_dataset from ref_geo.models import LAreas -class FilterMixin: - @classmethod - def compute_filter(cls, **kwargs): - f = sa.true() - for key, value in kwargs.items(): - if "." in key: - rel_name, key = key.split(".", 1) - try: - rel = getattr(cls, rel_name) - except AttributeError: - continue - remote_cls = rel.property.mapper.class_ - if not hasattr(remote_cls, "compute_filter"): - continue - _f = remote_cls.compute_filter(**{key: value}) - if rel.property.uselist: - f &= rel.any(_f) - else: - f &= rel.has(_f) - return f +class DateFilterSchema(ma.Schema): + year = ma.fields.Integer() + month = ma.fields.Integer() + day = ma.fields.Integer() + + +class MetadataFilterSchema(ma.Schema): + class Meta: + unknown = ma.EXCLUDE + + uuid = ma.fields.UUID(allow_none=True) + name = ma.fields.String() + date = ma.fields.Nested(DateFilterSchema) + person = ma.fields.Integer() + organism = ma.fields.Integer() + areas = ma.fields.List(ma.fields.Integer()) + search = ma.fields.String() + + @ma.post_load(pass_many=False) + def convert_date(self, data, **kwargs): + if "date" in data: + date = data["date"] + try: + data["date"] = datetime.date( + year=date["year"], month=date["month"], day=date["day"] + ) + except TypeError as exc: + raise ma.ValidationError(*exc.args, field_name="date") from exc + return data class CorAcquisitionFrameworkObjectif(DB.Model): @@ -221,33 +233,8 @@ class CorDatasetTerritory(DB.Model): ) -class CruvedMixin: - """ - Classe abstraite permettant d'ajouter des méthodes de - contrôle d'accès à la donnée des class TDatasets et TAcquisitionFramework - """ - - def get_object_cruved(self, user_cruved): - """ - Return the user's cruved for a Model instance. 
- Use in the map-list interface to allow or not an action - params: - - user_cruved: object retourner by cruved_for_user_in_app(user) {'C': '2', 'R':'3' etc...} - - id_object (int): id de l'objet sur lqurqul on veut vérifier le CRUVED (self.id_dataset/ self.id_ca) - - id_role: identifiant de la personne qui demande la route - - id_object_users_actor (list): identifiant des objects ou l'utilisateur est lui même acteur - - id_object_organism_actor (list): identifiants des objects ou l'utilisateur ou son organisme sont acteurs - - Return: dict {'C': True, 'R': False ...} - """ - return { - action: self.has_instance_permission(int(level)) - for action, level in user_cruved.items() - } - - @serializable -class TBibliographicReference(CruvedMixin, db.Model): +class TBibliographicReference(db.Model): __tablename__ = "t_bibliographical_references" __table_args__ = {"schema": "gn_meta"} id_bibliographic_reference = DB.Column(DB.Integer, primary_key=True) @@ -259,11 +246,11 @@ class TBibliographicReference(CruvedMixin, db.Model): publication_reference = DB.Column(DB.Unicode) -class TDatasetsQuery(BaseQuery): +class TDatasetsQuery(Query): def _get_read_scope(self, user=None): if user is None: user = g.current_user - cruved = get_scopes_by_action(id_role=user.id_role, module_code="GEONATURE") + cruved = get_scopes_by_action(id_role=user.id_role, module_code="METADATA") return cruved["R"] def _get_create_scope(self, module_code, user=None): @@ -297,31 +284,79 @@ def filter_by_scope(self, scope, user=None): self = self.filter(or_(*ors)) return self - def filter_by_params(self, params: MultiDict = MultiDict()): - if "active" in params: - self = self.filter(TDatasets.active == bool(params["active"])) - params.pop("active") - table_columns = TDatasets.__table__.columns - if "orderby" in params: - try: - orderCol = getattr(table_columns, params.pop("orderby")) - self = self.order_by(orderCol) - except AttributeError: - raise BadRequest("the attribute to order on does not exist") - if "module_code" in params: - self = self.filter(TDatasets.modules.any(module_code=params.pop("module_code"))) - # Generic Filters - for key, values in params.lists(): + def filter_by_params(self, params={}, _af_search=True): + class DatasetFilterSchema(MetadataFilterSchema): + active = ma.fields.Boolean() + orderby = ma.fields.String() + module_code = ma.fields.String() + id_acquisition_frameworks = ma.fields.List(ma.fields.Integer(), allow_none=True) + + params = DatasetFilterSchema().load(params) + + active = params.get("active") + if active is not None: + self = self.filter(TDatasets.active == active) + + module_code = params.get("module_code") + if module_code: + self = self.filter(TDatasets.modules.any(module_code=module_code)) + + af_ids = params.get("id_acquisition_frameworks") + if af_ids: + self = self.filter( + sa.or_(*[TDatasets.id_acquisition_framework == af_id for af_id in af_ids]) + ) + + uuid = params.get("uuid") + if uuid: + self = self.filter(TDatasets.unique_dataset_id == uuid) + + name = params.get("name") + if name: + self = self.filter(TDatasets.dataset_name.ilike(f"%{name}%")) + + date = params.get("date") + if date: + self = self.filter(sa.cast(TDatasets.meta_create_date, sa.DATE) == date) + + actors = [] + person = params.get("person") + if person: + actors.append(TDatasets.cor_dataset_actor.any(CorDatasetActor.id_role == person)) + organism = params.get("organism") + if organism: + actors.append(TDatasets.cor_dataset_actor.any(CorDatasetActor.id_organism == organism)) + if actors: + self = 
self.filter(sa.or_(*actors)) + + areas = params.get("areas") + if areas: + self = self.filter_by_areas(areas) + + search = params.get("search") + if search: + ors = [ + TDatasets.dataset_name.ilike(f"%{search}%"), + sa.cast(TDatasets.id_dataset, sa.String) == search, + ] + # enable uuid search only with at least 5 characters + if len(search) >= 5: + ors.append(sa.cast(TDatasets.unique_dataset_id, sa.String).like(f"{search}%")) try: - col = getattr(TDatasets, key) - except AttributeError: - raise BadRequest(f"Column {key} does not exist") - col = getattr(table_columns, key) - for v in values: - testT = testDataType(v, col.type, key) - if testT: - raise BadRequest(testT) - ors = [col == v for v in values] + date = datetime.datetime.strptime(search, "%d/%m/%Y").date() + except ValueError: + pass + else: + ors.append(sa.cast(TDatasets.meta_create_date, sa.DATE) == date) + if _af_search: + ors.append( + TDatasets.acquisition_framework.has( + TAcquisitionFramework.query.filter_by_params( + {"search": search}, + _ds_search=False, + ).whereclause + ) + ) self = self.filter(or_(*ors)) return self @@ -347,13 +382,13 @@ def filter_by_areas(self, areas): from geonature.core.gn_synthese.models import Synthese areaFilter = [] - for type_area, id_area in areas: - areaFilter.append(sa.and_(LAreas.id_type == type_area, LAreas.id_area == id_area)) + for id_area in areas: + areaFilter.append(LAreas.id_area == id_area) return self.filter(TDatasets.synthese_records.any(Synthese.areas.any(sa.or_(*areaFilter)))) @serializable(exclude=["user_actors", "organism_actors"]) -class TDatasets(CruvedMixin, FilterMixin, db.Model): +class TDatasets(db.Model): __tablename__ = "t_datasets" __table_args__ = {"schema": "gn_meta"} query_class = TDatasetsQuery @@ -409,8 +444,8 @@ class TDatasets(CruvedMixin, FilterMixin, db.Model): ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), default=lambda: TNomenclatures.get_default_nomenclature("RESOURCE_TYP"), ) - meta_create_date = DB.Column(DB.DateTime) - meta_update_date = DB.Column(DB.DateTime) + meta_create_date = DB.Column(DB.DateTime, server_default=FetchedValue()) + meta_update_date = DB.Column(DB.DateTime, server_default=FetchedValue()) active = DB.Column(DB.Boolean, default=True) validable = DB.Column(DB.Boolean, server_default=FetchedValue()) id_digitizer = DB.Column(DB.Integer, ForeignKey(User.id_role)) @@ -491,7 +526,7 @@ def has_instance_permission(self, scope, _through_af=True): if scope == 0: return False elif scope in (1, 2): - if g.current_user == self.digitizer or g.current_user in self.user_actors: + if g.current_user.id_role == self.id_digitizer or g.current_user in self.user_actors: return True if scope == 2 and g.current_user.organisme in self.organism_actors: return True @@ -520,29 +555,13 @@ def get_uuid(id_dataset): .scalar() ) - @classmethod - def compute_filter(cls, **kwargs): - f = super().compute_filter(**kwargs) - uuid = kwargs.get("uuid") - if uuid is not None: - try: - uuid = UUID(uuid.strip()) - except TypeError: - pass - else: - f &= TDatasets.unique_dataset_id == uuid - name = kwargs.get("name") - if name is not None: - f &= TDatasets.dataset_name.ilike(f"%{name}%") - return f - -class TAcquisitionFrameworkQuery(BaseQuery): - def _get_read_scope(self): - cruved, herited = cruved_scope_for_user_in_module( - id_role=g.current_user.id_role, module_code="GEONATURE" - ) - return int(cruved["R"]) +class TAcquisitionFrameworkQuery(Query): + def _get_read_scope(self, user=None): + if user is None: + user = g.current_user + cruved = 
get_scopes_by_action(id_role=user.id_role, module_code="METADATA") + return cruved["R"] def filter_by_scope(self, scope, user=None): if user is None: @@ -585,21 +604,88 @@ def filter_by_areas(self, areas): ), ) - def filter_by_params(self, params={}): + def filter_by_params(self, params={}, _ds_search=True): # XXX frontend retro-compatibility - selector = params.get("selector") - areas = params.pop("areas", None) - if selector == "ds": - params = {f"datasets.{key}": value for key, value in params.items()} - f = TAcquisitionFramework.compute_filter(**params) - qs = self.filter(f) + if params.get("selector") == "ds": + ds_params = params + params = {"datasets": ds_params} + if "search" in ds_params: + params["search"] = ds_params.pop("search") + ds_params = params.get("datasets") + if ds_params: + ds_filter = TDatasets.query.filter_by_params(ds_params).whereclause + if ds_filter is not None: # do not exclude AF without any DS + self = self.filter(TAcquisitionFramework.datasets.any(ds_filter)) + + params = MetadataFilterSchema().load(params) + + uuid = params.get("uuid") + if uuid: + self = self.filter(TAcquisitionFramework.unique_acquisition_framework_id == uuid) + + name = params.get("name") + if name: + self = self.filter(TAcquisitionFramework.acquisition_framework_name.ilike(f"%{name}%")) + + date = params.get("date") + if date: + self = self.filter(TAcquisitionFramework.acquisition_framework_start_date == date) + + actors = [] + person = params.get("person") + if person: + actors.append( + TAcquisitionFramework.cor_af_actor.any( + CorAcquisitionFrameworkActor.id_role == person + ) + ) + organism = params.get("organism") + if organism: + actors.append( + TAcquisitionFramework.cor_af_actor.any( + CorAcquisitionFrameworkActor.id_organism == organism + ) + ) + if actors: + self = self.filter(sa.or_(*actors)) + + areas = params.get("areas") if areas: - qs = qs.filter_by_areas(areas) - return qs + self = self.filter_by_areas(areas) + + search = params.get("search") + if search: + ors = [ + TAcquisitionFramework.acquisition_framework_name.ilike(f"%{search}%"), + sa.cast(TAcquisitionFramework.id_acquisition_framework, sa.String) == search, + ] + # enable uuid search only with at least 5 characters + if len(search) >= 5: + ors.append( + sa.cast(TAcquisitionFramework.unique_acquisition_framework_id, sa.String).like( + f"{search}%" + ) + ) + try: + date = datetime.datetime.strptime(search, "%d/%m/%Y").date() + except ValueError: + pass + else: + ors.append(TAcquisitionFramework.acquisition_framework_start_date == date) + if _ds_search: + ors.append( + TAcquisitionFramework.datasets.any( + TDatasets.query.filter_by_params( + {"search": search}, _af_search=False + ).whereclause + ), + ) + self = self.filter(sa.or_(*ors)) + return self @serializable(exclude=["user_actors", "organism_actors"]) -class TAcquisitionFramework(CruvedMixin, FilterMixin, db.Model): +class TAcquisitionFramework(db.Model): __tablename__ = "t_acquisition_frameworks" __table_args__ = {"schema": "gn_meta"} query_class = TAcquisitionFrameworkQuery @@ -745,7 +831,7 @@ def has_instance_permission(self, scope, _through_ds=True): if scope == 0: return False elif scope in (1, 2): - if g.current_user == self.creator or g.current_user in self.user_actors: + if g.current_user.id_role == self.id_digitizer or g.current_user in self.user_actors: return True if scope == 2 and g.current_user.organisme in self.organism_actors: return True @@ -805,22 +891,6 @@ def get_user_af(user, only_query=False, only_user=False): data = q.all() return 
list(set([d.id_acquisition_framework for d in data]))
 
-    @classmethod
-    def compute_filter(cls, **kwargs):
-        f = super().compute_filter(**kwargs)
-        uuid = kwargs.get("uuid")
-        if uuid is not None:
-            try:
-                uuid = UUID(uuid.strip())
-            except TypeError:
-                pass
-            else:
-                f &= TAcquisitionFramework.unique_acquisition_framework_id == uuid
-        name = kwargs.get("name")
-        if name is not None:
-            f &= TAcquisitionFramework.acquisition_framework_name.ilike(f"%{name}%")
-        return f
-
 
 @serializable
 class TDatasetDetails(TDatasets):
diff --git a/backend/geonature/core/gn_meta/mtd/__init__.py b/backend/geonature/core/gn_meta/mtd/__init__.py
index c091dc0889..b6278fa55a 100644
--- a/backend/geonature/core/gn_meta/mtd/__init__.py
+++ b/backend/geonature/core/gn_meta/mtd/__init__.py
@@ -1,43 +1,64 @@
 from urllib.parse import urljoin
+import logging
+
 import requests
 from lxml import etree
-from flask import current_app
-from sqlalchemy.dialects.postgresql import insert as pg_insert
-from sqlalchemy.sql import func
+from datetime import *
+
+import time
 
 from geonature.utils.config import config
 from geonature.utils.env import db
 from geonature.core.gn_meta.models import (
-    TAcquisitionFramework,
-    TDatasets,
     CorAcquisitionFrameworkActor,
     CorDatasetActor,
 )
 from geonature.core.auth.routes import insert_user_and_org
-from pypnusershub.db.models import User, Organisme
+from pypnusershub.db.models import User
+
+from pypnnomenclature.models import TNomenclatures
+
+from .xml_parser import parse_acquisition_framework, parse_jdd_xml, parse_acquisition_framwork_xml
+from .mtd_utils import sync_af, sync_ds, associate_actors
 
-from .xml_parser import parse_acquisition_framework, parse_jdd_xml
-from .mtd_utils import create_cor_object_actors, NOMENCLATURE_MAPPING
 
+# create logger
+logger = logging.getLogger("MTD_SYNC")
+# config logger
+logger.setLevel(config["MTD"]["SYNC_LOG_LEVEL"])
+handler = logging.StreamHandler()
+formatter = logging.Formatter("%(asctime)s | %(levelname)s : %(message)s", "%Y-%m-%d %H:%M:%S")
+handler.setFormatter(formatter)
+logger.addHandler(handler)
+# avoid logging output duplication
+logger.propagate = False
 
 
 class MTDInstanceApi:
     af_path = "/mtd/cadre/export/xml/GetRecordsByInstanceId?id={ID_INSTANCE}"
     ds_path = "/mtd/cadre/jdd/export/xml/GetRecordsByInstanceId?id={ID_INSTANCE}"
+    ds_user_path = "/mtd/cadre/jdd/export/xml/GetRecordsByUserId?id={ID_ROLE}"
+    single_af_path = "/mtd/cadre/export/xml/GetRecordById?id={ID_AF}"
 
-    def __init__(self, api_endpoint, instance_id):
+    # https://inpn.mnhn.fr/mtd/cadre/jdd/export/xml/GetRecordsByUserId?id=41542
+    def __init__(self, api_endpoint, instance_id, id_role=None):
         self.api_endpoint = api_endpoint
         self.instance_id = instance_id
+        self.id_role = id_role
 
-    def _get_xml(self, path):
-        url = urljoin(self.api_endpoint, path)
-        url = url.format(ID_INSTANCE=self.instance_id)
+    def _get_xml_by_url(self, url):
+        logger.debug("MTD - REQUEST : %s" % url)
         response = requests.get(url)
         response.raise_for_status()
         return response.content
 
+    def _get_xml(self, path):
+        url = urljoin(self.api_endpoint, path)
+        url = url.format(ID_INSTANCE=self.instance_id)
+        return self._get_xml_by_url(url)
+
     def _get_af_xml(self):
         return self._get_xml(self.af_path)
 
@@ -57,6 +78,18 @@ def get_ds_list(self):
         xml = self._get_ds_xml()
         return parse_jdd_xml(xml)
 
+    def get_ds_user_list(self):
+        url = urljoin(self.api_endpoint, self.ds_user_path)
+        url = url.format(ID_ROLE=self.id_role)
+        xml = self._get_xml_by_url(url)
+        return parse_jdd_xml(xml)
+
+    def get_user_af_list(self, af_uuid):
+ 
url = urljoin(self.api_endpoint, self.single_af_path)
+        url = url.format(ID_AF=af_uuid)
+        xml = self._get_xml_by_url(url)
+        return parse_acquisition_framwork_xml(xml)
+
 
 class INPNCAS:
     base_url = config["CAS"]["CAS_USER_WS"]["BASE_URL"]
@@ -77,74 +110,46 @@ def get_user(cls, user_id):
 
 
 def add_unexisting_digitizer(id_digitizer):
+    """
+    Create the digitizer role if it does not already exist in the database.
+
+    :param id_digitizer: id_role of the digitizer taken from the metadata
+    """
     if not db.session.query(User.query.filter_by(id_role=id_digitizer).exists()).scalar():
+        # not fast - needs perf optimization on the user call
         user = INPNCAS.get_user(id_digitizer)
+        # avoid creating the organism
+        if user.get("codeOrganisme"):
+            user["codeOrganisme"] = None
+        # insert or update user
         insert_user_and_org(user)
 
 
-def add_or_update_organism(uuid, nom, email):
-    statement = (
-        pg_insert(Organisme)
-        .values(
-            uuid_organisme=uuid,
-            nom_organisme=nom,
-            email_organisme=email,
-        )
-        .on_conflict_do_update(
-            index_elements=["uuid_organisme"],
-            set_=dict(
-                nom_organisme=nom,
-                email_organisme=email,
-            ),
-        )
-        .returning(Organisme.id_organisme)
-    )
-    return db.session.execute(statement).scalar()
+def process_af_and_ds(af_list, ds_list, id_role=None):
+    """
+    Synchronize the AF list, then the DS list.
-
-def associate_actors(actors, CorActor, pk_name, pk_value):
-    for actor in actors:
-        if not actor["uuid_organism"]:
-            continue
-        with db.session.begin_nested():
-            id_organism = add_or_update_organism(
-                uuid=actor["uuid_organism"],
-                nom=actor["organism"] or "",
-                email=actor["email"],
-            )
-            statement = (
-                pg_insert(CorActor)
-                .values(
-                    id_organism=id_organism,
-                    id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature(
-                        "ROLE_ACTEUR", actor["actor_role"]
-                    ),
-                    **{pk_name: pk_value},
-                )
-                .on_conflict_do_nothing(
-                    index_elements=[pk_name, "id_organism", "id_nomenclature_actor_role"],
-                )
-            )
-            db.session.execute(statement)
-
-
-def sync_af_and_ds():
+    :param af_list: list of AF
+    :param ds_list: list of DS
+    :param id_role: role id, used only when syncing on user authentication
+    """
     cas_api = INPNCAS()
-    mtd_api = MTDInstanceApi(config["MTD_API_ENDPOINT"], config["MTD"]["ID_INSTANCE_FILTER"])
-
-    af_list = mtd_api.get_af_list()
+    # read nomenclatures from DB to avoid errors if GN nomenclature is not the same
+    list_cd_nomenclature = [
+        record[0] for record in db.session.query(TNomenclatures.cd_nomenclature).distinct()
+    ]
+    user_add_total_time = 0
+    logger.debug("MTD - PROCESS AF LIST")
     for af in af_list:
-        with db.session.begin_nested():
-            add_unexisting_digitizer(af["id_digitizer"])
         actors = af.pop("actors")
-        statement = (
-            pg_insert(TAcquisitionFramework)
-            .values(**af)
-            .on_conflict_do_update(index_elements=["unique_acquisition_framework_id"], set_=af)
-            .returning(TAcquisitionFramework.id_acquisition_framework)
-        )
-        af_id = db.session.execute(statement).scalar()
-        af = TAcquisitionFramework.query.get(af_id)
+        with db.session.begin_nested():
+            start_add_user_time = time.time()
+            if not id_role:
+                add_unexisting_digitizer(af["id_digitizer"])
+            else:
+                add_unexisting_digitizer(id_role)
+            user_add_total_time += time.time() - start_add_user_time
+        af = sync_af(af)
         associate_actors(
             actors,
             CorAcquisitionFrameworkActor,
@@ -153,30 +158,64 @@
         )
         # TODO: remove actors removed from MTD
         db.session.commit()
-
-    ds_list = mtd_api.get_ds_list()
+    logger.debug("MTD - PROCESS DS LIST")
     for ds in ds_list:
-        with db.session.begin_nested():
-            add_unexisting_digitizer(ds["id_digitizer"])
         actors = ds.pop("actors")
-        af_uuid = ds.pop("uuid_acquisition_framework")
-        af = 
TAcquisitionFramework.query.filter_by(unique_acquisition_framework_id=af_uuid).first()
-        if af is None:
-            continue
-        ds["id_acquisition_framework"] = af.id_acquisition_framework
-        ds = {
-            k: func.ref_nomenclatures.get_id_nomenclature(NOMENCLATURE_MAPPING[k], v)
-            if k.startswith("id_nomenclature")
-            else v
-            for k, v in ds.items()
-            if v is not None
-        }
-        statement = (
-            pg_insert(TDatasets)
-            .values(**ds)
-            .on_conflict_do_update(index_elements=["unique_dataset_id"], set_=ds)
-        )
-        db.session.execute(statement)
-        ds = TDatasets.query.filter_by(unique_dataset_id=ds["unique_dataset_id"]).first()
-        associate_actors(actors, CorDatasetActor, "id_dataset", ds.id_dataset)
+        # CREATE DIGITIZER
+        with db.session.begin_nested():
+            start_add_user_time = time.time()
+            if not id_role:
+                add_unexisting_digitizer(ds["id_digitizer"])
+            else:
+                add_unexisting_digitizer(id_role)
+            user_add_total_time += time.time() - start_add_user_time
+        ds = sync_ds(ds, list_cd_nomenclature)
+        if ds is not None:
+            associate_actors(actors, CorDatasetActor, "id_dataset", ds.id_dataset)
+
+    user_add_total_time = round(user_add_total_time, 2)
     db.session.commit()
+
+
+def sync_af_and_ds():
+    """
+    Method to trigger global MTD sync.
+    """
+    logger.info("MTD - SYNC GLOBAL : START")
+    mtd_api = MTDInstanceApi(config["MTD_API_ENDPOINT"], config["MTD"]["ID_INSTANCE_FILTER"])
+
+    af_list = mtd_api.get_af_list()
+
+    ds_list = mtd_api.get_ds_list()
+
+    # sync from the retrieved lists
+    process_af_and_ds(af_list, ds_list)
+    logger.info("MTD - SYNC GLOBAL : FINISH")
+
+
+def sync_af_and_ds_by_user(id_role):
+    """
+    Method to trigger MTD sync on user authentication.
+    """
+
+    logger.info("MTD - SYNC USER : START")
+
+    mtd_api = MTDInstanceApi(
+        config["MTD_API_ENDPOINT"], config["MTD"]["ID_INSTANCE_FILTER"], id_role
+    )
+
+    ds_list = mtd_api.get_ds_user_list()
+    user_af_uuids = [ds["uuid_acquisition_framework"] for ds in ds_list]
+
+    # TODO - check with INPN why per-user AFs are not returned by the global AF call
+    # This code does not work for that reason -> missing AFs
+    # af_list = mtd_api.get_af_list()
+    # af_list = [af for af in af_list if af["unique_acquisition_framework_id"] in user_af_uuids]
+
+    # call the INPN API for each AF to retrieve its info
+    af_list = [mtd_api.get_user_af_list(af_uuid) for af_uuid in user_af_uuids]
+
+    # process the AF and DS lists
+    process_af_and_ds(af_list, ds_list, id_role)
+
+    logger.info("MTD - SYNC USER : FINISH")
diff --git a/backend/geonature/core/gn_meta/mtd/mtd_utils.py b/backend/geonature/core/gn_meta/mtd/mtd_utils.py
index 6ca4d0eaee..efa1ffbe94 100644
--- a/backend/geonature/core/gn_meta/mtd/mtd_utils.py
+++ b/backend/geonature/core/gn_meta/mtd/mtd_utils.py
@@ -4,9 +4,9 @@
 from flask import current_app
 from sqlalchemy.exc import SQLAlchemyError
-from sqlalchemy.sql import func
+from sqlalchemy.sql import func, update
 
-from geonature.utils.errors import GeonatureApiError
+from sqlalchemy.dialects.postgresql import insert as pg_insert
 
 from geonature.utils.env import DB
 from geonature.core.gn_meta.models import (
@@ -19,7 +19,6 @@
 from pypnusershub.db.models import Organisme as BibOrganismes
 from geonature.core.users import routes as users
 from geonature.core.auth.routes import insert_user_and_org, get_user_from_id_inpn_ws
-from pypnusershub.db.models import User
 
 from .xml_parser import parse_acquisition_framwork_xml, parse_jdd_xml
 from .mtd_webservice import get_jdd_by_user_id, get_acquisition_framework, get_jdd_by_uuid
@@ -35,318 +34,171 @@
 log = logging.getLogger()
 
 
-def 
create_cor_object_actors(actors, new_object):
+def sync_ds(ds, cd_nomenclatures):
     """
-    Create a new cor_dataset_actor/cor_acquisition_framework_actor object for the JDD/AF
-    Input :
-        actors (list) : List of all actors related to the JDD/AF
-        new_object : JDD or AF
+    Will create or update a given DS according to its UUID.
+    Only processes the DS if the dataset's cd_nomenclature exists in ref_nomenclatures.t_nomenclatures.
+
+    :param ds: DS infos
+    :param cd_nomenclatures: cd_nomenclature values from ref_nomenclatures.t_nomenclatures
     """
-    for act in actors:
-        # person = None
-        # id_person = None
-        org = None
-        id_organism = None
+    if ds["id_nomenclature_data_origin"] not in cd_nomenclatures:
+        return
+
+    # CONTROL AF
+    af_uuid = ds.pop("uuid_acquisition_framework")
+    af = TAcquisitionFramework.query.filter_by(unique_acquisition_framework_id=af_uuid).first()
+
+    if af is None:
+        return
+
+    ds["id_acquisition_framework"] = af.id_acquisition_framework
+    ds = {
+        k: func.ref_nomenclatures.get_id_nomenclature(NOMENCLATURE_MAPPING[k], v)
+        if k.startswith("id_nomenclature")
+        else v
+        for k, v in ds.items()
+        if v is not None
+    }
+
+    ds_exists = (
+        TDatasets.query.filter_by(unique_dataset_id=ds["unique_dataset_id"]).first() is not None
+    )
 
-        # For the moment wo do not match the user with the actor provided by the XML -> only the organism
+    if ds_exists:
+        statement = (
+            update(TDatasets)
+            .where(TDatasets.unique_dataset_id == ds["unique_dataset_id"])
+            .values(**ds)
+        )
+    else:
+        statement = (
+            pg_insert(TDatasets)
+            .values(**ds)
+            .on_conflict_do_nothing(index_elements=["unique_dataset_id"])
+        )
+    DB.session.execute(statement)
+    dataset = TDatasets.query.filter_by(unique_dataset_id=ds["unique_dataset_id"]).first()
+
+    # Associate dataset to the modules if new dataset
+    if not ds_exists:
+        associate_dataset_modules(dataset)
+
+    return dataset
+
+
+def sync_af(af):
+    """
+    Will create or update a given AF according to its UUID.
-    # If the email of the contact Person was provided in the XML file, we try to link him to the t_role table
-    # if act["email"]:
-    #     # We first check if the Person's email exists in the t_role table
-    #     person = (
-    #         DB.session.query(User)
-    #         .filter(User.email == act["email"])
-    #         .first()
-    #     )
-    #     # If not, we create it as a new Person in the t_role table and get his ID back
-    #     if not person:
-    #         if act["uuid_organism"]:
-    #             org = (
-    #                 DB.session.query(BibOrganismes)
-    #                 .filter(BibOrganismes.uuid_organisme == act["uuid_organism"])
-    #                 .first()
-    #             )
-    #         person = {
-    #             "id_role": None,
-    #             "nom_role": act["name"],
-    #             "email": act["email"],
-    #         }
-    #         if org:
-    #             person['id_organisme'] = org.id_organisme
-    #         resp = users.insert_role(person)
-    #         id_person = json.loads(resp.data.decode('utf-8'))['id_role']
-    #     else:
-    #         id_person = person.id_role
+    :param af: dict AF infos
+    """
+    af_uuid = af["unique_acquisition_framework_id"]
+    af_exists = (
+        TAcquisitionFramework.query.filter_by(unique_acquisition_framework_id=af_uuid).first()
+        is not None
+    )
+    if af_exists:
+        # this avoids a useless nextval on the sequence
+        statement = (
+            update(TAcquisitionFramework)
+            .where(TAcquisitionFramework.unique_acquisition_framework_id == af_uuid)
+            .values(af)
+            .returning(TAcquisitionFramework.id_acquisition_framework)
+        )
+    else:
+        statement = (
+            pg_insert(TAcquisitionFramework)
+            .values(**af)
+            .on_conflict_do_nothing(index_elements=["unique_acquisition_framework_id"])
+            .returning(TAcquisitionFramework.id_acquisition_framework)
+        )
+    af_id = DB.session.execute(statement).scalar()
+    af = TAcquisitionFramework.query.get(af_id)
+    return af
+
+
+def add_or_update_organism(uuid, nom, email):
+    """
+    Create the organism if its UUID does not exist in DB, otherwise update it.
-    # If the informations about the Organism is provided, we try to link it to the bib_organismes table
-    if act["uuid_organism"] or act["organism"]:
-        # UUID in actually only present on JDD XML files
-        # Filter on UUID is preferable if available since it avoids dupes based on name changes
-        if act["uuid_organism"]:
-            org = (
-                DB.session.query(BibOrganismes)
-                .filter(BibOrganismes.uuid_organisme == act["uuid_organism"])
-                .first()
-            )
-        else:
-            org = (
-                DB.session.query(BibOrganismes)
-                .filter(BibOrganismes.nom_organisme == act["organism"])
-                .first()
-            )
-        # If no Organism was corresponding in the bib_organismes table, we add it
-        if not org:
-            org = BibOrganismes(
-                **{
-                    "nom_organisme": act["organism"],
-                    "uuid_organisme": act["uuid_organism"],
-                }
+    :param uuid: unique organism uuid
+    :param nom: org name
+    :param email: org email
+    """
+    # Test if the organism already exists to avoid a needless nextval increase
+    org = BibOrganismes.query.filter_by(uuid_organisme=uuid).first() is not None
+    if org:
+        statement = (
+            update(BibOrganismes)
+            .where(BibOrganismes.uuid_organisme == uuid)
+            .values(
+                dict(
+                    nom_organisme=nom,
+                    email_organisme=email,
                 )
-                DB.session.add(org)
-                DB.session.commit()
-            id_organism = org.id_organisme
-
-        # With at least the Person or the Organism was provided for the actor in the XML file,
-        # we build the data for the correlation
-        if id_organism:
-            dict_cor = {
-                "id_organism": id_organism,
-                "id_nomenclature_actor_role": func.ref_nomenclatures.get_id_nomenclature(
-                    "ROLE_ACTEUR", act["actor_role"]
-                ),
-            }
-
-            # We finally build the correlation corresponding to the JDD/AF
-            if isinstance(new_object, TAcquisitionFramework):
-                if not any(
-                    map(
-                        lambda cafa: dict_cor["id_organism"] == cafa.id_organism
-                        and act["actor_role"]
-                        == cafa.id_nomenclature_actor_role.clauses.clauses[1].value,
-                        new_object.cor_af_actor,
-                    )
-                ):
-                    cor_actor = CorAcquisitionFrameworkActor(**dict_cor)
-                    new_object.cor_af_actor.append(cor_actor)
-            elif isinstance(new_object, TDatasets):
-                if not any(
-                    map(
-                        lambda ca: dict_cor["id_organism"] == ca.id_organism
-                        and act["actor_role"]
-                        == ca.id_nomenclature_actor_role.clauses.clauses[1].value,
-                        new_object.cor_dataset_actor,
-                    )
-                ):
-                    cor_actor = CorDatasetActor(**dict_cor)
-                    new_object.cor_dataset_actor.append(cor_actor)
+                )
+            .returning(BibOrganismes.id_organisme)
+        )
+    else:
+        statement = (
+            pg_insert(BibOrganismes)
+            .values(
+                uuid_organisme=uuid,
+                nom_organisme=nom,
+                email_organisme=email,
+            )
+            .on_conflict_do_nothing(index_elements=["uuid_organisme"])
+            .returning(BibOrganismes.id_organisme)
+        )
+    return DB.session.execute(statement).scalar()
 
 
-def post_acquisition_framework(uuid=None):
-    """
-    Post an acquisition framwork from MTD XML
-    Params:
-        uuid (str): uuid of the acquisition framework
+def associate_actors(actors, CorActor, pk_name, pk_value):
     """
-    xml_af = None
-    xml_af = get_acquisition_framework(uuid)
+    Associate actors with a DS or an AF through the given CorActor table.
 
-    if xml_af:
-        acquisition_framwork = parse_acquisition_framwork_xml(xml_af)
-        actors = acquisition_framwork.pop("actors")
-        new_af = TAcquisitionFramework(**acquisition_framwork)
-        id_acquisition_framework = TAcquisitionFramework.get_id(uuid)
-        # if the CA already exist in the DB
-        if id_acquisition_framework:
-            # delete cor_af_actor
-            new_af.id_acquisition_framework = id_acquisition_framework
-
-            delete_q = CorAcquisitionFrameworkActor.__table__.delete().where(
-                CorAcquisitionFrameworkActor.id_acquisition_framework == id_acquisition_framework
+    :param actors: list of actors
+    :param CorActor: table model
+    :param pk_name: pk attribute name
+    :param pk_value: pk value
+    """
+    for actor in actors:
+        if not actor["uuid_organism"]:
+            continue
+        # test if the actor already exists
+        with DB.session.begin_nested():
+            # create or update the organism
+            id_organism = add_or_update_organism(
+                uuid=actor["uuid_organism"],
+                nom=actor["organism"] or "",
+                email=actor["email"],
            )
-            DB.session.execute(delete_q)
-            DB.session.commit()
-            create_cor_object_actors(actors, new_af)
-            DB.session.merge(new_af)
-
-        # its a new AF
-        else:
-            create_cor_object_actors(actors, new_af)
-            # Add the new CA
-            DB.session.add(new_af)
-        # try to commit
-        try:
-            DB.session.commit()
-        # TODO catch db error ? 
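The update-or-insert pattern used by sync_af and add_or_update_organism above generalizes to any natural-key upsert; a hedged standalone sketch (the helper name and its arguments are illustrative, not part of this changeset):

    import sqlalchemy as sa
    from sqlalchemy.dialects.postgresql import insert as pg_insert

    def upsert_by_natural_key(session, Model, key_col, key_value, values):
        # update when the row already exists, so the PK sequence is not consumed;
        # otherwise insert, ignoring a concurrent duplicate
        if session.query(sa.exists().where(key_col == key_value)).scalar():
            stmt = sa.update(Model).where(key_col == key_value).values(**values)
        else:
            stmt = (
                pg_insert(Model)
                .values(**values)
                .on_conflict_do_nothing(index_elements=[key_col.key])
            )
        session.execute(stmt)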
- except SQLAlchemyError as e: - error_msg = "Error posting an aquisition framework\nTrace:\n{} \n\n ".format(e) - log.error(error_msg) - - return new_af.as_dict() + # Test if actor already exists to avoid nextVal increase + statement = ( + pg_insert(CorActor) + .values( + id_organism=id_organism, + id_nomenclature_actor_role=func.ref_nomenclatures.get_id_nomenclature( + "ROLE_ACTEUR", actor["actor_role"] + ), + **{pk_name: pk_value}, + ) + .on_conflict_do_nothing( + index_elements=[pk_name, "id_organism", "id_nomenclature_actor_role"], + ) + ) + DB.session.execute(statement) - return {"message": "Not found"}, 404 +def associate_dataset_modules(dataset): + """ + Associate a dataset to modules specified in [MTD][JDD_MODULE_CODE_ASSOCIATION] parameter (geonature config) -def add_dataset_module(dataset): + :param dataset: dataset (SQLAlchemy model object) + """ dataset.modules.extend( DB.session.query(TModules) .filter(TModules.module_code.in_(current_app.config["MTD"]["JDD_MODULE_CODE_ASSOCIATION"])) .all() ) - - -def post_jdd_from_user(id_user=None): - """Post a jdd from the mtd XML""" - xml_jdd = None - xml_jdd = get_jdd_by_user_id(id_user) - if xml_jdd: - dataset_list = parse_jdd_xml(xml_jdd) - posted_af_uuid = {} - for ds in dataset_list: - actors = ds.pop("actors") - # prevent to not fetch, post or merge the same acquisition framework multiple times - if ds["uuid_acquisition_framework"] not in posted_af_uuid: - new_af = post_acquisition_framework( - uuid=ds["uuid_acquisition_framework"], - ) - # build a cached dict like {'': 'id_acquisition_framework} - posted_af_uuid[ds["uuid_acquisition_framework"]] = new_af[ - "id_acquisition_framework" - ] - # get the id from the uuid - ds["id_acquisition_framework"] = posted_af_uuid.get(ds["uuid_acquisition_framework"]) - - ds.pop("uuid_acquisition_framework") - # get the id of the dataset to check if exists - id_dataset = TDatasets.get_id(ds["unique_dataset_id"]) - ds["id_dataset"] = id_dataset - # search nomenclature - ds_copy = copy(ds) - for key, value in ds_copy.items(): - if key.startswith("id_nomenclature"): - response = DB.session.query( - func.ref_nomenclatures.get_id_nomenclature( - NOMENCLATURE_MAPPING.get(key), value - ) - ).one_or_none() - if response and response[0]: - ds[key] = response[0] - else: - ds.pop(key) - - #  set validable = true - ds["validable"] = True - dataset = TDatasets(**ds) - # if the dataset already exist - if id_dataset: - # delete cor_ds_actor - dataset.id_dataset = id_dataset - - delete_q = CorDatasetActor.__table__.delete().where( - CorDatasetActor.id_dataset == id_dataset - ) - DB.session.execute(delete_q) - DB.session.commit() - - # create the correlation links - create_cor_object_actors(actors, dataset) - add_dataset_module(dataset) - DB.session.merge(dataset) - - # its a new DS - else: - # set the dataset as activ - dataset.active = True - # create the correlation links - create_cor_object_actors(actors, dataset) - add_dataset_module(dataset) - # Add the new DS - DB.session.add(dataset) - # try to commit - try: - DB.session.commit() - # TODO catch db error ? 
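Which modules associate_dataset_modules above attaches is driven purely by configuration; a hedged illustration, where the module codes are example values rather than defaults shipped with GeoNature:

    # hypothetical geonature config excerpt (TOML):
    # [MTD]
    # JDD_MODULE_CODE_ASSOCIATION = ["OCCTAX", "OCCHAB"]
    #
    # with such a setting, each newly synchronized dataset is linked to the
    # OCCTAX and OCCHAB modules:
    associate_dataset_modules(dataset)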
- except SQLAlchemyError as e: - error_msg = "Error posting a dataset\nTrace:\n{} \n\n ".format(e) - log.error(error_msg) - - -def import_all_dataset_af_and_actors(table_name): - file_handler = logging.FileHandler("/tmp/uuid_ca.txt") - file_handler.setLevel(logging.CRITICAL) - log.addHandler(file_handler) - datasets = DB.engine.execute(f"SELECT * FROM {table_name}") - for d in datasets: - xml_jdd = get_jdd_by_uuid(str(d.unique_dataset_id)) - if xml_jdd: - ds_list = parse_jdd_xml(xml_jdd) - if ds_list: - ds = ds_list[0] - inpn_user = get_user_from_id_inpn_ws(ds["id_digitizer"]) - # get user info from id_digitizer - if inpn_user: - # insert user id digitizer - insert_user_and_org(inpn_user) - actors = ds.pop("actors") - # prevent to not fetch, post or merge the same acquisition framework multiple times - new_af = post_acquisition_framework( - uuid=ds["uuid_acquisition_framework"], - ) - # get the id from the uuid - ds["id_acquisition_framework"] = new_af["id_acquisition_framework"] - log.critical(str(new_af["id_acquisition_framework"]) + ",") - ds.pop("uuid_acquisition_framework") - # get the id of the dataset to check if exists - id_dataset = TDatasets.get_id(ds["unique_dataset_id"]) - ds["id_dataset"] = id_dataset - # search nomenclature - ds_copy = copy(ds) - for key, value in ds_copy.items(): - if key.startswith("id_nomenclature"): - if value is not None: - ds[key] = func.ref_nomenclatures.get_id_nomenclature( - NOMENCLATURE_MAPPING.get(key), value - ) - else: - ds.pop(key) - - #  set validable = true - ds["validable"] = True - dataset = TDatasets(**ds) - # if the dataset already exist - if id_dataset: - # delete cor_ds_actor - dataset.id_dataset = id_dataset - - delete_q = CorDatasetActor.__table__.delete().where( - CorDatasetActor.id_dataset == id_dataset - ) - DB.session.execute(delete_q) - DB.session.commit() - - # create the correlation links - create_cor_object_actors(actors, dataset) - add_dataset_module(dataset) - DB.session.merge(dataset) - - # its a new DS - else: - # set the dataset as activ - dataset.active = True - # create the correlation links - create_cor_object_actors(actors, dataset) - add_dataset_module(dataset) - # Add the new DS - DB.session.add(dataset) - # try to commit - try: - DB.session.commit() - # TODO catch db error ? - except SQLAlchemyError as e: - error_msg = "Error posting a dataset\nTrace:\n{} \n\n ".format(e) - print(error_msg) - else: - print("NO USER FOUND") - else: - "NO JDD IN XML ????" 
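The removed bulk-import flow above is covered by the sync entry points defined earlier in this changeset; a minimal usage sketch, assuming an application context is active (the id_role value is illustrative):

    from geonature.core.gn_meta.mtd import sync_af_and_ds, sync_af_and_ds_by_user

    # full instance-wide synchronization (AF list, then DS list)
    sync_af_and_ds()

    # or, on user authentication, only the metadata visible to that user
    sync_af_and_ds_by_user(id_role=42)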
- else: - print("JDD NOT FOUND") diff --git a/backend/geonature/core/gn_meta/mtd/xml_parser.py b/backend/geonature/core/gn_meta/mtd/xml_parser.py index ef898462fc..98ced2625d 100644 --- a/backend/geonature/core/gn_meta/mtd/xml_parser.py +++ b/backend/geonature/core/gn_meta/mtd/xml_parser.py @@ -73,7 +73,6 @@ def parse_acquisition_framwork_xml(xml): def parse_acquisition_framework(ca): - # We extract all the required informations from the different tags of the XML file ca_uuid = get_tag_content(ca, "identifiantCadre") ca_name_max_length = TAcquisitionFramework.acquisition_framework_name.property.columns[ diff --git a/backend/geonature/core/gn_meta/repositories.py b/backend/geonature/core/gn_meta/repositories.py index db4bf678c1..1f482d6e06 100644 --- a/backend/geonature/core/gn_meta/repositories.py +++ b/backend/geonature/core/gn_meta/repositories.py @@ -16,7 +16,6 @@ from geonature.utils.env import DB from geonature.utils.errors import GeonatureApiError -from geonature.core.gn_permissions.tools import cruved_scope_for_user_in_module from geonature.core.gn_commons.models import cor_field_dataset, TAdditionalFields diff --git a/backend/geonature/core/gn_meta/routes.py b/backend/geonature/core/gn_meta/routes.py index ce0a5e30d9..bf03454541 100644 --- a/backend/geonature/core/gn_meta/routes.py +++ b/backend/geonature/core/gn_meta/routes.py @@ -4,33 +4,29 @@ import datetime as dt import json import logging -import threading -from pathlib import Path -from binascii import a2b_base64 - -import click from lxml import etree as ET from flask import ( Blueprint, current_app, request, - render_template, - send_from_directory, - copy_current_request_context, Response, g, ) + +import click + from flask.json import jsonify from sqlalchemy import inspect, and_, or_ from sqlalchemy.sql import text, exists, select, update from sqlalchemy.sql.functions import func -from sqlalchemy.orm import Load, joinedload, raiseload +from sqlalchemy.orm import Load, joinedload, raiseload, undefer from werkzeug.exceptions import Conflict, BadRequest, Forbidden, NotFound from werkzeug.datastructures import Headers, MultiDict from werkzeug.utils import secure_filename from marshmallow import ValidationError, EXCLUDE + from geonature.utils.config import config from geonature.utils.env import DB, db from geonature.core.gn_synthese.models import ( @@ -40,6 +36,8 @@ ) from geonature.core.gn_permissions.decorators import login_required +from .mtd import sync_af_and_ds as mtd_sync_af_and_ds, sync_af_and_ds_by_user + from ref_geo.models import LAreas from pypnnomenclature.models import TNomenclatures from pypnusershub.db.tools import InsufficientRightsError @@ -66,10 +64,7 @@ from utils_flask_sqla.response import json_resp, to_csv_resp, generate_csv_content from werkzeug.datastructures import Headers from geonature.core.gn_permissions import decorators as permissions -from geonature.core.gn_permissions.tools import ( - cruved_scope_for_user_in_module, - get_scopes_by_action, -) +from geonature.core.gn_permissions.tools import get_scopes_by_action from geonature.core.gn_meta.mtd import mtd_utils import geonature.utils.filemanager as fm import geonature.utils.utilsmails as mail @@ -90,12 +85,12 @@ def synchronize_mtd(): if request.endpoint in ["gn_meta.get_datasets", "gn_meta.get_acquisition_frameworks_list"]: try: - mtd_utils.post_jdd_from_user(id_user=g.current_user.id_role) + sync_af_and_ds_by_user(id_role=g.current_user.id_role) except Exception as e: log.exception("Error while get JDD via MTD") -@routes.route("/datasets", 
methods=["GET"]) +@routes.route("/datasets", methods=["GET", "POST"]) @login_required def get_datasets(): """ @@ -108,24 +103,60 @@ def get_datasets(): :returns: `list` """ params = MultiDict(request.args) - allowed_fields = {"modules"} - fields = params.pop("fields", None) + fields = params.get("fields", type=str, default=[]) if fields: fields = fields.split(",") - if set(fields) - allowed_fields: - raise BadRequest(f"Allowed fields: {','.join(allowed_fields)}") if "create" in params: query = TDatasets.query.filter_by_creatable(params.pop("create")) else: query = TDatasets.query.filter_by_readable() - query = query.filter_by_params(params) - data = [d.as_dict(fields=fields) for d in query.all()] + + if request.is_json: + query = query.filter_by_params(request.json) + + if "orderby" in params: + table_columns = TDatasets.__table__.columns + try: + orderCol = getattr(table_columns, params.pop("orderby")) + query = query.order_by(orderCol) + except AttributeError as exc: + raise BadRequest("the attribute to order on does not exist") from exc + + query = query.options( + Load(TDatasets).raiseload("*"), + joinedload("cor_dataset_actor").options( + joinedload("role"), + joinedload("organism"), + ), + # next relationships are joined for permission checks purpose: + joinedload("acquisition_framework").options( + joinedload("cor_af_actor"), + ), + ) + only = [ + "+cruved", + "cor_dataset_actor", + "cor_dataset_actor.nomenclature_actor_role", + "cor_dataset_actor.organism", + "cor_dataset_actor.role", + ] + + if params.get("synthese_records_count", type=int, default=0): + query = query.options(undefer(TDatasets.synthese_records_count)) + only.append("+synthese_records_count") + + if "modules" in fields: + query = query.options(joinedload("modules")) + only.append("modules") + + dataset_schema = DatasetSchema(only=only) + + # detect mobile app to enable retro-compatibility hacks user_agent = request.headers.get("User-Agent") - if ( - user_agent and user_agent.split("/")[0].lower() == "okhttp" - ): # retro-compatibility for mobile app - return jsonify({"data": data}) - return jsonify(data) + mobile_app = user_agent and user_agent.split("/")[0].lower() == "okhttp" + dataset_schema.context["mobile_app"] = mobile_app + + return dataset_schema.jsonify(query.all(), many=True) def get_af_from_id(id_af, af_list): @@ -155,6 +186,7 @@ def get_dataset(scope, id_dataset): dataset_schema = DatasetSchema( only=[ + "+cruved", "creator", "cor_dataset_actor", "cor_dataset_actor.nomenclature_actor_role", @@ -177,31 +209,9 @@ def get_dataset(scope, id_dataset): "sources", ] ) - - # TODO: Replace with get_scopes_by_action - # check this in front - user_cruved = cruved_scope_for_user_in_module( - id_role=g.current_user.id_role, - module_code="METADATA", - )[0] - dataset_schema.context = {"user_cruved": user_cruved} - return dataset_schema.jsonify(dataset) -@routes.route("/upload_canvas", methods=["POST"]) -def upload_canvas(): - """Upload the canvas as a temporary image used while generating the pdf file""" - data = request.data[22:] - filepath = str(Path(current_app.static_folder) / "images" / "taxa.png") - fm.remove_file(filepath) - if data: - binary_data = a2b_base64(data) - with open(filepath, "wb") as fd: - fd.write(binary_data) - return "", 204 - - @routes.route("/dataset/", methods=["DELETE"]) @permissions.check_cruved_scope("D", get_scope=True, module_code="METADATA") def delete_dataset(scope, ds_id): @@ -399,7 +409,6 @@ def sensi_report(): def my_csv_resp(filename, data, columns, _header, separator=";"): - 
headers = Headers() headers.add("Content-Type", "text/plain") headers.add("Content-Disposition", "attachment", filename="export_%s.csv" % filename) @@ -450,7 +459,7 @@ def update_dataset(id_dataset, scope): return DatasetSchema().jsonify(datasetHandler(dataset=dataset, data=request.get_json())) -@routes.route("/dataset/export_pdf/", methods=["GET"]) +@routes.route("/dataset/export_pdf/", methods=["GET", "POST"]) @permissions.check_cruved_scope("E", get_scope=True, module_code="METADATA") def get_export_pdf_dataset(id_dataset, scope): """ @@ -459,17 +468,18 @@ def get_export_pdf_dataset(id_dataset, scope): dataset = TDatasets.query.get_or_404(id_dataset) if not dataset.has_instance_permission(scope=scope): raise Forbidden("Vous n'avez pas les droits d'exporter ces informations") - dataset_schema = DatasetSchema( only=[ "nomenclature_data_type", "nomenclature_dataset_objectif", "nomenclature_collecting_method", "acquisition_framework", + "cor_dataset_actor.nomenclature_actor_role", + "cor_dataset_actor.organism", + "cor_dataset_actor.role", ] ) dataset = dataset_schema.dump(dataset) - if len(dataset.get("dataset_desc")) > 240: dataset["dataset_desc"] = dataset.get("dataset_desc")[:240] + "..." @@ -486,37 +496,26 @@ def get_export_pdf_dataset(id_dataset, scope): "url": current_app.config["URL_APPLICATION"] + "/#/metadata/dataset_detail/" + id_dataset, "date": date, } - - filename = "jdd_{}_{}_{}.pdf".format( - id_dataset, - secure_filename(dataset["dataset_shortname"]), - dt.datetime.now().strftime("%d%m%Y_%H%M%S"), - ) - - dataset["chart"] = (Path(current_app.static_folder) / "images" / "taxa.png").exists() - - # Appel de la methode pour generer un pdf - pdf_file = fm.generate_pdf("dataset_template_pdf.html", dataset, filename) - pdf_file_posix = Path(pdf_file) - - return send_from_directory(str(pdf_file_posix.parent), pdf_file_posix.name, as_attachment=True) + # chart + if request.is_json and request.json is not None: + dataset["chart"] = request.json["chart"] + # create PDF file + pdf_file = fm.generate_pdf("dataset_template_pdf.html", dataset) + return current_app.response_class(pdf_file, content_type="application/pdf") @routes.route("/acquisition_frameworks", methods=["GET", "POST"]) -@permissions.check_cruved_scope( - "R", - get_scope=True, -) -def get_acquisition_frameworks(scope): +@login_required +def get_acquisition_frameworks(): """ Get a simple list of AF without any nested relationships Use for AF select in form Get the GeoNature CRUVED """ - only = [] + only = ["+cruved"] # QUERY af_list = TAcquisitionFramework.query.filter_by_readable() - if request.method == "POST": + if request.is_json: af_list = af_list.filter_by_params(request.json) af_list = af_list.order_by(TAcquisitionFramework.acquisition_framework_name).options( @@ -538,7 +537,7 @@ def get_acquisition_frameworks(scope): if request.args.get("datasets", default=False, type=int): only.extend( [ - "t_datasets", + "t_datasets.+cruved", ] ) if request.args.get("creator", default=False, type=int): @@ -575,11 +574,6 @@ def get_acquisition_frameworks(scope): ), ) af_schema = AcquisitionFrameworkSchema(only=only) - user_cruved = cruved_scope_for_user_in_module( - id_role=g.current_user.id_role, - module_code="METADATA", - )[0] - af_schema.context = {"user_cruved": user_cruved} return af_schema.jsonify(af_list.all(), many=True) @@ -591,6 +585,8 @@ def get_acquisition_frameworks_list(scope): Use in metadata module for list of AF and DS Add the CRUVED permission for each row (Dataset and AD) + DEPRECATED use 
get_acquisition_frameworks instead + .. :quickref: Metadata; :qparam list excluded_fields: fields excluded from serialization @@ -603,10 +599,6 @@ def get_acquisition_frameworks_list(scope): if "selector" not in params: params["selector"] = None - user_cruved = cruved_scope_for_user_in_module( - id_role=g.current_user.id_role, - module_code="METADATA", - )[0] nested_serialization = params.get("nested", False) nested_serialization = True if nested_serialization == "true" else False exclude_fields = [] @@ -617,20 +609,22 @@ def get_acquisition_frameworks_list(scope): # exclude all relationships from serialization if nested = false exclude_fields = [db_rel.key for db_rel in inspect(TAcquisitionFramework).relationships] - acquisitionFrameworkSchema = AcquisitionFrameworkSchema(exclude=exclude_fields) - acquisitionFrameworkSchema.context = {"user_cruved": user_cruved} + acquisitionFrameworkSchema = AcquisitionFrameworkSchema( + only=["+cruved"], exclude=exclude_fields + ) return acquisitionFrameworkSchema.jsonify( get_metadata_list(g.current_user, scope, params, exclude_fields).all(), many=True ) -@routes.route("/acquisition_frameworks/export_pdf/", methods=["GET"]) +@routes.route( + "/acquisition_frameworks/export_pdf/", methods=["POST", "GET"] +) @permissions.check_cruved_scope("E", module_code="METADATA") def get_export_pdf_acquisition_frameworks(id_acquisition_framework): """ Get a PDF export of one acquisition """ - # Recuperation des données af = DB.session.query(TAcquisitionFrameworkDetails).get(id_acquisition_framework) acquisition_framework = af.as_dict(True, depth=2) @@ -671,6 +665,9 @@ def get_export_pdf_acquisition_frameworks(id_acquisition_framework): "nb_habitats": nb_habitat, } + if request.is_json and request.json is not None: + acquisition_framework["chart"] = request.json["chart"] + if acquisition_framework: acquisition_framework[ "nomenclature_territorial_level" @@ -699,8 +696,6 @@ def get_export_pdf_acquisition_frameworks(id_acquisition_framework): + id_acquisition_framework, "date": date, } - params = {"id_acquisition_frameworks": id_acquisition_framework} - else: return ( render_template( @@ -714,26 +709,11 @@ def get_export_pdf_acquisition_frameworks(id_acquisition_framework): acquisition_framework["initial_closing_date"] = af.initial_closing_date.strftime( "%d-%m-%Y %H:%M" ) - filename = "{}_{}_{}.pdf".format( - id_acquisition_framework, - secure_filename(acquisition_framework["acquisition_framework_name"][0:31]), - af.initial_closing_date.strftime("%d%m%Y_%H%M%S"), - ) acquisition_framework["closed_title"] = current_app.config["METADATA"]["CLOSED_AF_TITLE"] - else: - filename = "{}_{}_{}.pdf".format( - id_acquisition_framework, - secure_filename(acquisition_framework["acquisition_framework_name"][0:31]), - dt.datetime.now().strftime("%d%m%Y_%H%M%S"), - ) - # Appel de la methode pour generer un pdf - pdf_file = fm.generate_pdf( - "acquisition_framework_template_pdf.html", acquisition_framework, filename - ) - pdf_file_posix = Path(pdf_file) - return send_from_directory(str(pdf_file_posix.parent), pdf_file_posix.name, as_attachment=True) + pdf_file = fm.generate_pdf("acquisition_framework_template_pdf.html", acquisition_framework) + return current_app.response_class(pdf_file, content_type="application/pdf") @routes.route("/acquisition_framework/", methods=["GET"]) @@ -758,6 +738,7 @@ def get_acquisition_framework(scope, id_acquisition_framework): try: af_schema = AcquisitionFrameworkSchema( only=[ + "+cruved", "creator", "nomenclature_territorial_level", 
"nomenclature_financing_type", @@ -780,13 +761,6 @@ def get_acquisition_framework(scope, id_acquisition_framework): ) except ValueError as e: raise BadRequest(str(e)) - - user_cruved = cruved_scope_for_user_in_module( - id_role=g.current_user.id_role, - module_code="METADATA", - )[0] - af_schema.context = {"user_cruved": user_cruved} - return af_schema.jsonify(af) @@ -814,18 +788,16 @@ def delete_acquisition_framework(scope, af_id): def acquisitionFrameworkHandler(request, *, acquisition_framework): - # Test des droits d'édition du acquisition framework si modification if acquisition_framework.id_acquisition_framework is not None: - user_cruved = get_scopes_by_action(module_code="META_DATA") + user_cruved = get_scopes_by_action(module_code="METADATA") # verification des droits d'édition pour le acquisition framework if not acquisition_framework.has_instance_permission(user_cruved["U"]): - raise InsufficientRightsError( + raise Forbidden( "User {} has no right in acquisition_framework {}".format( g.current_user, acquisition_framework.id_acquisition_framework - ), - 403, + ) ) else: acquisition_framework.id_digitizer = g.current_user.id_role @@ -873,7 +845,7 @@ def updateAcquisitionFramework(id_acquisition_framework, scope): if not af.has_instance_permission(scope=scope): raise Forbidden( f"User {g.current_user} cannot update " - "acquisition framework {af.id_acquisition_framework}" + f"acquisition framework {af.id_acquisition_framework}" ) return AcquisitionFrameworkSchema().dump( acquisitionFrameworkHandler(request=request, acquisition_framework=af) @@ -902,9 +874,9 @@ def get_acquisition_framework_stats(id_acquisition_framework): .distinct() .count() ) - nb_observations = ( - DB.session.query(Synthese.cd_nom).filter(Synthese.id_dataset.in_(dataset_ids)).count() - ) + nb_observations = Synthese.query.filter( + Synthese.dataset.has(TDatasets.id_acquisition_framework == id_acquisition_framework) + ).count() nb_habitat = 0 # Check if pr_occhab exist @@ -1068,5 +1040,14 @@ def publish_acquisition_framework(af_id): @routes.cli.command() -def mtd_sync(): - mtd_sync_af_and_ds() +@click.argument("id_role", nargs=1, required=False, default=None) +def mtd_sync(id_role): + """ + Trigger global sync or a sync for a given user only. 
+ + :param id_role: user id + """ + if id_role: + return sync_af_and_ds_by_user(id_role) + else: + return mtd_sync_af_and_ds() diff --git a/backend/geonature/core/gn_meta/schemas.py b/backend/geonature/core/gn_meta/schemas.py index 02ce7b9239..0567d2acd7 100644 --- a/backend/geonature/core/gn_meta/schemas.py +++ b/backend/geonature/core/gn_meta/schemas.py @@ -10,6 +10,7 @@ TBibliographicReference, ) from geonature.utils.env import MA +from geonature.utils.schema import CruvedSchemaMixin from geonature.core.gn_commons.models import TModules from geonature.core.gn_commons.schemas import ModuleSchema from geonature.core.gn_synthese.schemas import SourceSchema @@ -20,15 +21,6 @@ from pypnnomenclature.schemas import NomenclatureSchema -class CruvedSchemaMixin: - cruved = fields.Method("get_user_cruved") - - def get_user_cruved(self, obj): - if "user_cruved" in self.context: - return obj.get_object_cruved(self.context["user_cruved"]) - return None - - class DatasetActorSchema(SmartRelationshipsMixin, MA.SQLAlchemyAutoSchema): class Meta: model = CorDatasetActor @@ -52,6 +44,8 @@ class Meta: load_instance = True include_fk = True + __module_code__ = "METADATA" + meta_create_date = fields.DateTime(dump_only=True) meta_update_date = fields.DateTime(dump_only=True) cor_dataset_actor = MA.Nested(DatasetActorSchema, many=True, unknown=EXCLUDE) @@ -70,7 +64,7 @@ class Meta: acquisition_framework = MA.Nested("AcquisitionFrameworkSchema", dump_only=True) sources = MA.Nested(SourceSchema, many=True, dump_only=True) - @post_dump(pass_original=True) + @post_dump(pass_many=False, pass_original=True) def module_input(self, item, original, many, **kwargs): if "modules" in item: for i, module in enumerate(original.modules): @@ -88,10 +82,20 @@ def module_input(self, item, original, many, **kwargs): ) return item + # retro-compatibility with mobile app + @post_dump(pass_many=True, pass_original=True) + def mobile_app_compat(self, data, original, many, **kwargs): + if self.context.get("mobile_app"): + if many: + for ds, orig_ds in zip(data, original): + ds["meta_create_date"] = str(orig_ds.meta_create_date) + data = {"data": data} + else: + data["meta_create_date"] = str(original.meta_create_date) + return data -class BibliographicReferenceSchema( - CruvedSchemaMixin, SmartRelationshipsMixin, MA.SQLAlchemyAutoSchema -): + +class BibliographicReferenceSchema(SmartRelationshipsMixin, MA.SQLAlchemyAutoSchema): class Meta: model = TBibliographicReference load_instance = True @@ -132,6 +136,8 @@ class Meta: load_instance = True include_fk = True + __module_code__ = "METADATA" + meta_create_date = fields.DateTime(dump_only=True) meta_update_date = fields.DateTime(dump_only=True) t_datasets = MA.Nested(DatasetSchema, many=True) diff --git a/backend/geonature/core/gn_permissions/admin.py b/backend/geonature/core/gn_permissions/admin.py new file mode 100644 index 0000000000..4510d80c07 --- /dev/null +++ b/backend/geonature/core/gn_permissions/admin.py @@ -0,0 +1,597 @@ +from flask import url_for, has_app_context, Markup, request +from flask_admin.contrib.sqla import ModelView +from flask_admin.contrib.sqla.filters import FilterEqual +import sqlalchemy as sa +from flask_admin.contrib.sqla.tools import get_primary_key +from flask_admin.contrib.sqla.fields import QuerySelectField +from flask_admin.contrib.sqla.ajax import QueryAjaxModelLoader +from flask_admin.form.widgets import Select2Widget +from sqlalchemy.orm import contains_eager, joinedload + +from geonature.utils.env import db +from geonature.core.admin.admin import 
admin +from geonature.core.admin.utils import CruvedProtectedMixin, DynamicOptionsMixin +from geonature.core.gn_permissions.models import ( + PermObject, + PermAction, + PermScope, + Permission, + PermissionAvailable, + PermFilter, +) +from geonature.core.gn_permissions.tools import get_permissions +from geonature.core.gn_commons.models.base import TModules + +from pypnusershub.db.models import User + + +### Filters + + +class RoleFilter(DynamicOptionsMixin, FilterEqual): + def get_dynamic_options(self, view): + if has_app_context(): + yield from [(u.id_role, u.nom_complet) for u in User.query.all()] + + +class ModuleFilter(DynamicOptionsMixin, FilterEqual): + def get_dynamic_options(self, view): + if has_app_context(): + yield from [ + (m.id_module, m.module_code) + for m in TModules.query.order_by(TModules.module_code).all() + ] + + +class ObjectFilter(DynamicOptionsMixin, FilterEqual): + def get_dynamic_options(self, view): + if has_app_context(): + yield from [(o.id_object, o.code_object) for o in PermObject.query.all()] + + +class ActionFilter(DynamicOptionsMixin, FilterEqual): + def get_dynamic_options(self, view): + if has_app_context(): + yield from [(a.id_action, a.code_action) for a in PermAction.query.all()] + + +class ScopeFilter(DynamicOptionsMixin, FilterEqual): + def apply(self, query, value, alias=None): + column = self.get_column(alias) + if value: + return query.filter(column == value) + else: + return query.filter(column.is_(None)) + + def get_dynamic_options(self, view): + if has_app_context(): + yield (None, "Sans restriction") + yield from [(a.value, a.label) for a in PermScope.query.all()] + + +### Formatters + + +def filters_formatter(v, c, m, p): + filters = [] + if m.scope: + filters.append(m.scope.label) + if m.sensitivity_filter: + filters.append("Données non sensibles") + return Markup("
    " + "".join(["
  • {}
  • ".format(f) for f in filters]) + "
") + + +def modules_formatter(view, context, model, name): + modules = [ + '{}'.format( + url_for("tmodules.details_view", id=module.id_module), + Markup.escape(module.module_code), + ) + for module in model.modules + ] + return Markup(", ".join(modules)) + + +def groups_formatter(view, context, model, name): + groups = [ + '{}'.format( + url_for("permissions/group.details_view", id=group.id_role), Markup.escape(group) + ) + for group in model.groups + ] + return Markup(", ".join(groups)) + + +def role_formatter(view, context, model, name): + role = model.role + if role.groupe: + url = url_for("permissions/group.details_view", id=role.id_role) + nom = "{}".format(Markup.escape(role.nom_role)) + else: + url = url_for("permissions/user.details_view", id=role.id_role) + nom = Markup.escape(role.nom_complet) + return Markup('{}'.format(url, nom)) + + +def permissions_formatter(view, context, model, name): + available_permissions = PermissionAvailable.query.nice_order().all() + + o = "" + columns = ["Module", "Object", "Action", "Label"] + if model.groupe: + return_url = url_for("permissions/group.details_view", id=model.id_role) + columns += ["Permissions"] + else: + return_url = url_for("permissions/user.details_view", id=model.id_role) + columns += ["Permissions personnelles", "Permissions effectives"] + o += "" + "".join([f"" for col in columns]) + "" + o += "" + for ap in available_permissions: + own_permissions = list( + filter( + lambda p: p.module == ap.module + and p.object == ap.object + and p.action == ap.action, + model.permissions, + ) + ) + permissions = [(own_permissions, True)] + if not model.groupe: + effective_permissions = list( + get_permissions( + id_role=model.id_role, + module_code=ap.module.module_code, + object_code=ap.object.code_object, + action_code=ap.action.code_action, + ) + ) + permissions.append((effective_permissions, False)) + o += ( + "" + if own_permissions or effective_permissions + else "" + ) + else: + o += "" if own_permissions else "" + + o += "".join( + [ + f"" + for col in [ + ap.module.module_code, + ap.object.code_object, + ap.action.code_action, + ap.label, + ] + ] + ) + for perms, managable in permissions: + o += "" + o += "" + o += "" + o += "
{col}
{col}" + if perms: + if len(perms) > 1: + o += f"{len(perms)} permissions :" + o += '' + for perm in perms: + flts = perm.filters + o += "" + if not flts: + o += '" + o += "
' + else: + o += '' + o += """
""" + if not flts: + o += """""" + else: + o += """
    """ + for flt_name in perm.availability.filters: + flt_field = Permission.filters_fields[flt_name] + flt = PermFilter(flt_name, getattr(perm, flt_field.name)) + o += f"""
  • {flt}
  • """ + o += "
" + o += """
""" + if managable: + o += """
""" + edit_url = url_for( + "permissions/permission.edit_view", + id=perm.id_permission, + url=return_url, + ) + delete_url = url_for( + "permissions/permission.delete_view", + id=perm.id_permission, + url=return_url, + ) + o += f"""
""" + if len(ap.filters) > 0: + o += ( + f"""""" + """""" + """""" + ) + o += ( + """" + "
" + ) + o += """
""" + o += "
" + if managable and (not perms or len(ap.filters) > 1): + add_url = url_for( + "permissions/permission.create_view", + id_role=model.id_role, + module_code=ap.module.module_code, + code_object=ap.object.code_object, + code_action=ap.action.code_action, + url=return_url, + ) + o += ( + f"""""" + """""" + """""" + ) + o += "
" + return Markup(o) + + +def permissions_count_formatter(view, context, model, name): + url = url_for("permissions/permission.index_view", flt1_rle_equals=model.id_role) + return Markup(f'{len(model.permissions)}') + + +### Widgets + + +class OptionSelect2Widget(Select2Widget): + @classmethod + def render_option(cls, value, label, options): + return super().render_option(value, label, options.pop("selected"), **options) + + +### Fields + + +class OptionQuerySelectField(QuerySelectField): + """ + Overrides the QuerySelectField class from flask admin to allow + other attributes on a select option. + + options_additional_values is added in form_args, it is a list of + strings, each element is the name of the attribute in the model + which will be added on the option + """ + + widget = OptionSelect2Widget() + + def __init__(self, *args, **kwargs): + self.options_additional_values = kwargs.pop("options_additional_values") + super().__init__(*args, **kwargs) + + def iter_choices(self): + if self.allow_blank: + yield ("__None", self.blank_text, {"selected": self.data is None}) + for pk, obj in self._get_object_list(): + options = {k: getattr(obj, k) for k in self.options_additional_values} + options["selected"] = obj == self.data + yield (pk, self.get_label(obj), options) + + +### ModelLoader + + +class UserAjaxModelLoader(QueryAjaxModelLoader): + def format(self, user): + if not user: + return None + + def format_availability(availability): + return ":".join( + [str(getattr(availability, attr)) for attr in get_primary_key(PermissionAvailable)] + ) + + def filter_availability(availability): + filters_count = sum( + [ + getattr(availability, field.name) + for field in PermissionAvailable.filters_fields.values() + ] + ) + return filters_count < 2 + + availabilities = {p.availability for p in user.permissions if p.availability} + excluded_availabilities = filter(filter_availability, availabilities) + excluded_availabilities = map(format_availability, excluded_availabilities) + return super().format(user) + (list(excluded_availabilities),) + + def get_query(self): + return ( + super() + .get_query() + .options(joinedload(User.permissions).joinedload(Permission.availability)) + .order_by(User.groupe.desc(), User.nom_role) + ) + + +### ModelViews + + +class ObjectAdmin(CruvedProtectedMixin, ModelView): + module_code = "ADMIN" + object_code = "PERMISSIONS" + + can_create = False + can_edit = False + can_delete = False + + column_list = ("code_object", "description_object", "modules") + column_labels = { + "code_object": "Code", + "description_object": "Description", + } + column_default_sort = "id_object" + column_formatters = { + "modules": modules_formatter, + } + + +# self.allow_blank = (True,) +# self.blank_test = "lalala" +class PermissionAdmin(CruvedProtectedMixin, ModelView): + module_code = "ADMIN" + object_code = "PERMISSIONS" + + column_list = ("role", "module", "object", "action", "label", "filters") + column_labels = { + "role": "Rôle", + "filters": "Restriction(s)", + "object": "Objet", + "role.identifiant": "identifiant du rôle", + "role.nom_complet": "nom du rôle", + "availability": "Permission", + "scope": "Filtre sur l'appartenance des données", + "sensitivity_filter": "Exclure les données sensibles", + } + column_select_related_list = ("availability",) + column_searchable_list = ("role.identifiant", "role.nom_complet") + column_formatters = { + "role": role_formatter, + "module": lambda v, c, m, p: m.module.module_code, + "object": lambda v, c, m, p: m.object.code_object, + "label": 
lambda v, c, m, p: m.availability.label if m.availability else None, + "filters": filters_formatter, + } + column_filters = ( + RoleFilter(column=Permission.id_role, name="Rôle"), + ModuleFilter(column=Permission.id_module, name="Module"), + ObjectFilter(column=Permission.id_object, name="Objet"), + ActionFilter(column=Permission.id_action, name="Action"), + ScopeFilter(column=Permission.scope_value, name="Scope"), + ) + named_filter_urls = True + column_sortable_list = ( + ("role", "role.nom_complet"), + ("module", "module.module_code"), + ("object", "object.code_object"), + ("action", "action.code_action"), + ) + column_default_sort = [ + ("role.nom_complet", False), + ("module.module_code", False), + ("object.code_object", False), + ("id_action", False), + ] + form_columns = ("role", "availability", "scope", "sensitivity_filter") + form_overrides = dict( + availability=OptionQuerySelectField, + ) + form_args = dict( + availability=dict( + query_factory=lambda: PermissionAvailable.query.nice_order(), + options_additional_values=["sensitivity_filter", "scope_filter"], + ), + ) + create_template = "admin/hide_select2_options_create.html" + edit_template = "admin/hide_select2_options_edit.html" + form_ajax_refs = { + "role": UserAjaxModelLoader( + "role", + db.session, + User, + fields=( + "identifiant", + "nom_role", + "prenom_role", + ), + placeholder="Veuillez sélectionner un utilisateur ou un groupe", + minimum_input_length=0, + ), + } + + def render(self, template, **kwargs): + self.extra_js = [url_for("static", filename="js/hide_unnecessary_filters.js")] + return super().render(template, **kwargs) + + def create_form(self): + form = super().create_form() + if request.method == "GET": + # Set default values from request.args + if "id_role" in request.args: + form.role.data = User.query.get(request.args.get("id_role", type=int)) + if {"module_code", "code_object", "code_action"}.issubset(request.args.keys()): + form.availability.data = ( + PermissionAvailable.query.join(PermissionAvailable.module) + .join(PermissionAvailable.object) + .join(PermissionAvailable.action) + .filter( + TModules.module_code == request.args.get("module_code"), + PermObject.code_object == request.args.get("code_object"), + PermAction.code_action == request.args.get("code_action"), + ) + .one_or_none() + ) + return form + + +class PermissionAvailableAdmin(CruvedProtectedMixin, ModelView): + module_code = "ADMIN" + object_code = "PERMISSIONS" + + can_create = False + can_delete = False + can_export = False + + column_labels = { + "scope": "Portée", + "object": "Objet", + "scope_filter": "Filtre appartenance", + "sensitivity_filter": "Filtre sensibilité", + } + column_formatters = { + "module": lambda v, c, m, p: m.module.module_code, + "object": lambda v, c, m, p: m.object.code_object, + } + column_sortable_list = ( + ("module", "module.module_code"), + ("object", "object.code_object"), + ("action", "action.code_action"), + ) + column_filters = (ModuleFilter(column=PermissionAvailable.id_module, name="Module"),) + column_default_sort = [ + ("module.module_code", False), + ("object.code_object", False), + ("id_action", False), + ] + form_columns = ("scope_filter", "sensitivity_filter") + + +class RolePermAdmin(CruvedProtectedMixin, ModelView): + module_code = "ADMIN" + object_code = "PERMISSIONS" + + can_create = False + can_edit = False + can_delete = False + can_export = False + can_view_details = True + + details_template = "role_or_group_detail.html" + column_select_related_list = ("permissions",) + 
column_labels = { + "nom_role": "Nom", + "prenom_role": "Prénom", + "groups": "Groupes", + "permissions": "Permissions", + "permissions_count": "Nombre de permissions", + } + column_searchable_list = ("identifiant", "nom_complet") + column_formatters = { + "groups": groups_formatter, + "permissions_count": permissions_count_formatter, + } + column_formatters_detail = { + "groups": groups_formatter, + "permissions": permissions_formatter, + "permissions_count": permissions_count_formatter, + } + + +class GroupPermAdmin(RolePermAdmin): + column_list = ( + "nom_role", + "permissions_count", + ) + column_details_list = ("nom_role", "permissions_count", "permissions") + + def get_query(self): + return User.query.filter_by(groupe=True).filter_by_app() + + def get_count_query(self): + return self.session.query(sa.func.count("*")).filter(User.groupe == True) + + +class UserPermAdmin(RolePermAdmin): + column_list = ( + "identifiant", + "nom_role", + "prenom_role", + "groups", + "permissions_count", + ) + column_labels = { + **RolePermAdmin.column_labels, + "permissions_count": "Nombre de permissions non héritées", + } + column_details_list = ( + "identifiant", + "nom_role", + "prenom_role", + "groups", + "permissions_count", + "permissions", + ) + + def get_query(self): + return User.query.filter_by(groupe=False).filter_by_app() + + def get_count_query(self): + # FIXME : must filter by app + return self.session.query(sa.func.count("*")).filter(User.groupe == False) + + +admin.add_view( + GroupPermAdmin( + User, + db.session, + name="Par groupes", + category="Permissions", + endpoint="permissions/group", + ) +) + + +admin.add_view( + UserPermAdmin( + User, + db.session, + name="Par utilisateurs", + category="Permissions", + endpoint="permissions/user", + ) +) + +# Retirer pour plus de lisibilité de l'interface des permissions +# admin.add_view( +# ObjectAdmin( +# PermObject, +# db.session, +# name="Objets", +# category="Permissions", +# endpoint="permissions/object", +# ) +# ) + + +admin.add_view( + PermissionAdmin( + Permission, + db.session, + name="Permissions", + category="Permissions", + endpoint="permissions/permission", + ) +) + +# Retirer pour plus de lisibilité de l'interface des permissions +# admin.add_view( +# PermissionAvailableAdmin( +# PermissionAvailable, +# db.session, +# name="Permissions disponibles", +# category="Permissions", +# endpoint="permissions/availablepermission", +# ) +# ) diff --git a/backend/geonature/core/gn_permissions/backoffice/forms.py b/backend/geonature/core/gn_permissions/backoffice/forms.py deleted file mode 100644 index 1ca8f4c471..0000000000 --- a/backend/geonature/core/gn_permissions/backoffice/forms.py +++ /dev/null @@ -1,97 +0,0 @@ -from flask_wtf import FlaskForm - - -from wtforms import ( - StringField, - PasswordField, - BooleanField, - SubmitField, - HiddenField, - SelectField, - RadioField, - SelectMultipleField, - widgets, - Form, -) -from wtforms.validators import DataRequired, Email -from wtforms.widgets import TextArea -from wtforms_sqlalchemy.fields import QuerySelectField - - -from geonature.core.gn_permissions.models import TFilters, BibFiltersType, TActions -from geonature.core.gn_commons.models import TModules -from geonature.utils.env import DB - - -class CruvedScopeForm(FlaskForm): - """ - Forms to manage cruved scope permissions - """ - - C = RadioField("create_scope") - R = RadioField("read_scope") - U = RadioField("update_scope") - V = RadioField("validate_scope") - E = RadioField("edit_scope") - D = RadioField("delete_scope") - submit 
= SubmitField("Valider") - - def init_choices(self): - """ - Get and set the scope filters to the form choices - """ - data = ( - DB.session.query(TFilters.id_filter, TFilters.description_filter) - .join(BibFiltersType, BibFiltersType.id_filter_type == TFilters.id_filter_type) - .filter(BibFiltersType.code_filter_type == "SCOPE") - .all() - ) - scope_choices = [(str(code), value) for code, value in data] - self.C.choices = scope_choices - self.R.choices = scope_choices - self.U.choices = scope_choices - self.V.choices = scope_choices - self.E.choices = scope_choices - self.D.choices = scope_choices - - def __init__(self, *args, **kwargs): - super(CruvedScopeForm, self).__init__(*args, **kwargs) - self.init_choices() - - -class OtherPermissionsForm(FlaskForm): - module = QuerySelectField( - "action", - query_factory=lambda: DB.session.query(TModules.id_module, TModules.module_label) - .order_by(TModules.module_label) - .all(), - get_pk=lambda mod: str(mod.id_module), - get_label=lambda mod: mod.module_label, - ) - action = QuerySelectField( - "action", - query_factory=lambda: DB.session.query(TActions).all(), - get_pk=lambda act: act.id_action, - get_label=lambda act: act.description_action, - ) - filter = SelectField( - "filtre", - ) - submit = SubmitField("Valider") - - def __init__(self, id_filter_type, *args, **kwargs): - super(OtherPermissionsForm, self).__init__(*args, **kwargs) - # id_filter_type = args[0] - self.filter.choices = [ - (str(filt.id_filter), filt.label_filter) - for filt in DB.session.query(TFilters) - .filter(TFilters.id_filter_type == id_filter_type) - .all() - ] - - -class FilterForm(FlaskForm): - label_filter = StringField("Label", validators=[DataRequired()]) - value_filter = StringField("Valeur du filtre", validators=[DataRequired()]) - description_filter = StringField("Description", validators=[DataRequired()], widget=TextArea()) - submit = SubmitField("Valider") diff --git a/backend/geonature/core/gn_permissions/backoffice/templates/cruved_scope_form.html b/backend/geonature/core/gn_permissions/backoffice/templates/cruved_scope_form.html deleted file mode 100644 index 37b47e0ddb..0000000000 --- a/backend/geonature/core/gn_permissions/backoffice/templates/cruved_scope_form.html +++ /dev/null @@ -1,100 +0,0 @@ - - {% include "librairies.html" %} - - - {% include "nav-bar.html" %} - -
- - {% if not id_object %} -

CRUVED de l'utilisateur {{user.nom_role}} {{user.prenom_role if user.prenom_role}} pour le module {{module.module_label}}

- {% else %} - -

CRUVED de l'utilisateur {{user.nom_role}} {{user.prenom_role if user.prenom_role}} pour {{object_instance.code_object}}

-
- {{object_instance.description_object}} -
- {% endif %} - - - {% if not user.groupe %} - - {% endif %} - - {% with messages = get_flashed_messages() %} - {% if messages %} - {% for message in messages %} - - {% endfor %} - {% endif %} - {% endwith %} - -
- {{ form.csrf_token }} {{ form.hidden_tag() }} - - - - - - - - - - - - {% for subfield in form.C %} - - {% endfor %} - - - - {% for subfield in form.R %} - - {% endfor %} - - - - - - {% for subfield in form.U %} - - {% endfor %} - - - - - - {% for subfield in form.V %} - - {% endfor %} - - - - - - {% for subfield in form.E %} - - {% endfor %} - - - - - {% for subfield in form.D %} - - {% endfor %} - - -
Aucune donnée Mes données Les données de mon organisme Toutes les données
Créer - Create{{ subfield }}
Lire - Read{{ subfield }}
Mettre à jour - Update{{ subfield }}
Valider - Validate{{ subfield }}
Export - Export{{ subfield }}
Supprimer - Delete{{ subfield }}
- {{form.submit(class="form-control btn-success")}} -
- -
- - diff --git a/backend/geonature/core/gn_permissions/backoffice/templates/cruved_user.html b/backend/geonature/core/gn_permissions/backoffice/templates/cruved_user.html deleted file mode 100644 index 39824f26df..0000000000 --- a/backend/geonature/core/gn_permissions/backoffice/templates/cruved_user.html +++ /dev/null @@ -1,131 +0,0 @@ - -{% include "librairies.html" %} - - - {% include "nav-bar.html" %} - -
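The per-user CRUVED pages being deleted here are superseded by the RolePermAdmin detail views added in admin.py, which resolve effective permissions through the new tools API. A short sketch of those calls, to be run inside an application context; the id_role and module code below are placeholders, not values from this patch:

    # Sketch of the lookup API the new admin views rely on.
    from geonature.core.gn_permissions.tools import get_permissions, get_scopes_by_action

    perms = get_permissions(action_code="R", module_code="METADATA", id_role=42)
    scopes = get_scopes_by_action(module_code="METADATA")  # e.g. {"C": 0, "R": 2, ...}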
- {% if user.group %} -
CRUVED du groupe {{user.nom_role}}
- {% else %} -
CRUVED de l'utilisateur {{user.nom_role}} {{user.prenom_role if user.prenom_role}}
- {% endif %} - - - {% if groupes|length > 0 %} - - - {% endif %} - - - {% for mod in modules %} - - - - - - {% endfor %} -
- - {{ mod.module_code }} - {% if mod.module_objects | length > 0 %} -
- - {% endif %} - -
- {% for cruved in mod.module_cruved[0] %} -
    -
  • {{ cruved.label }} : {{ cruved.value }}
  • -
- {% endfor %} -
- {% for obj in mod.module_objects %} - -
- {{ obj.code_object }} - {{obj.description_object}} - - {% if obj.cruved[1] %} - Hérité du module parent - {% endif %} - - -
- - {% for action in obj.cruved[0] %} -
    -
  • - {{action.label}}: {{ action.value }} -
  • -
- {% endfor %} - - - -
- - {% endfor %} -
-
- {% if mod.module_cruved[1] %} - Hérité du module parent {{mod.module_cruved[2][0]}} - {% if mod.module_cruved[2][1] != "ALL" %},{{mod.module_cruved[2][1]}}{% endif %} - {% endif %} - - - - -
- - -
- - - - - - - - - - \ No newline at end of file diff --git a/backend/geonature/core/gn_permissions/backoffice/templates/filter_form.html b/backend/geonature/core/gn_permissions/backoffice/templates/filter_form.html deleted file mode 100644 index 719a926873..0000000000 --- a/backend/geonature/core/gn_permissions/backoffice/templates/filter_form.html +++ /dev/null @@ -1,36 +0,0 @@ - - - {% include "librairies.html" %} - - - {% include "nav-bar.html" %} -
-
{{'Editer' if id_filter else 'Ajouter'}} un filtre
-

{{filter_type.label_filter_type}}

- -
- - {{ form.csrf_token }} {{ form.hidden_tag() }} -
- {{form.label_filter.label}} - {{ form.label_filter(class="form-control") }} -
-
- {{ form.value_filter.label}} -
- - {{filter_type.description_filter_type}} - - {{ form.value_filter(class="form-control")}} -
-
- {{ form.description_filter.label}} - {{ form.description_filter(class="form-control") }} -
- - {{form.submit(class="form-control btn-success")}} -
-
- - - \ No newline at end of file diff --git a/backend/geonature/core/gn_permissions/backoffice/templates/filter_list.html b/backend/geonature/core/gn_permissions/backoffice/templates/filter_list.html deleted file mode 100644 index 4b3a3946d0..0000000000 --- a/backend/geonature/core/gn_permissions/backoffice/templates/filter_list.html +++ /dev/null @@ -1,70 +0,0 @@ - - - {% include "librairies.html" %} - - - {% include "nav-bar.html" %} -
- {% with messages = get_flashed_messages() %} - {% if messages %} - {% for message in messages %} - - {% endfor %} - {% endif %} - {% endwith %} -

Filtres - '{{filter_type.label_filter_type}}'

- {{filter_type.description_filter_type}} - - - - - - - - - - {% for filter in filters %} - - - - - - - - - - {% endfor %} -
Label Description Valeurs
- {{filter.label_filter}} - - - {{filter.description_filter}} - - {{filter.value_filter}} - - - - - - -
- -
-
- - - - -
- - - - - \ No newline at end of file diff --git a/backend/geonature/core/gn_permissions/backoffice/templates/librairies.html b/backend/geonature/core/gn_permissions/backoffice/templates/librairies.html deleted file mode 100644 index c0e6ad4d86..0000000000 --- a/backend/geonature/core/gn_permissions/backoffice/templates/librairies.html +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/backend/geonature/core/gn_permissions/backoffice/templates/nav-bar.html b/backend/geonature/core/gn_permissions/backoffice/templates/nav-bar.html deleted file mode 100644 index 0d29380a84..0000000000 --- a/backend/geonature/core/gn_permissions/backoffice/templates/nav-bar.html +++ /dev/null @@ -1,27 +0,0 @@ - - \ No newline at end of file diff --git a/backend/geonature/core/gn_permissions/backoffice/templates/other_permissions_form.html b/backend/geonature/core/gn_permissions/backoffice/templates/other_permissions_form.html deleted file mode 100644 index 815589889a..0000000000 --- a/backend/geonature/core/gn_permissions/backoffice/templates/other_permissions_form.html +++ /dev/null @@ -1,31 +0,0 @@ - - - {% include "librairies.html" %} - - - {% include "nav-bar.html" %} -
-

{{filter_type.label_filter_type}} pour le role : {{user.nom_role}} {{user.prenom_role if user.prenom_role}}

- - -
- {{ form.csrf_token }} {{ form.hidden_tag() }} -
- - {{form.module(class="form-control")}} -
-
- - {{form.action(class="form-control")}} -
-
- - {{form.filter(class="form-control")}} -
- - {{form.submit(class="form-control btn-success")}} -
- -
- - \ No newline at end of file diff --git a/backend/geonature/core/gn_permissions/backoffice/templates/user_other_permissions.html b/backend/geonature/core/gn_permissions/backoffice/templates/user_other_permissions.html deleted file mode 100644 index 35979691c4..0000000000 --- a/backend/geonature/core/gn_permissions/backoffice/templates/user_other_permissions.html +++ /dev/null @@ -1,73 +0,0 @@ - - - {% include "librairies.html" %} - - - {% include "nav-bar.html" %} -
-

Permissions du role {{user.nom_role}} {{user.prenom_role if user.prenom_role}}

- {% if permissions | length > 0 %} - - - - - - - - - - {% for perm in permissions %} - - - - - - - - {% endfor %} - -
Action Module Type de filtre Filtre
{{perm.description_action}} {{perm.module_code}} {{perm.code_filter_type}} {{perm.label_filter}} - - - -
- {% else %} - - - -
-
- - {% endif %} - -
Type de permissions disponibles
- - - {% for filter_type in filter_types %} - - - - - - - {% endfor %} -
{{filter_type.label_filter_type}} - - - - - - - -
- -
- - \ No newline at end of file diff --git a/backend/geonature/core/gn_permissions/backoffice/templates/users.html b/backend/geonature/core/gn_permissions/backoffice/templates/users.html deleted file mode 100644 index 642720b5e1..0000000000 --- a/backend/geonature/core/gn_permissions/backoffice/templates/users.html +++ /dev/null @@ -1,56 +0,0 @@ - -
- - - -
- - - {% include "nav-bar.html" %} -
-

Liste des roles

- - {% with messages = get_flashed_messages() %} {% if messages %} {% for - message in messages %} - - {% endfor %} {% endif %} {% endwith %} {% block body %}{% endblock %} - - - - - - - - - - - {% for user in users %} - - - - - - - - - {% endfor %} - -
Id role Nom role Prenom role Nb CRUVED
{{ user.id_role }}{{ user.nom_role }}{{ user.prenom_role if user.prenom_role }} {{user.nb_cruved}} - - - - - - - -
-
- - - - diff --git a/backend/geonature/core/gn_permissions/backoffice/views.py b/backend/geonature/core/gn_permissions/backoffice/views.py deleted file mode 100644 index 29aae51654..0000000000 --- a/backend/geonature/core/gn_permissions/backoffice/views.py +++ /dev/null @@ -1,420 +0,0 @@ -from flask import ( - request, - render_template, - Blueprint, - flash, - current_app, - redirect, - url_for, - g, -) - -from sqlalchemy.exc import IntegrityError, SQLAlchemyError -from sqlalchemy.orm import joinedload -from sqlalchemy.sql import func -from sqlalchemy import or_ - - -from geonature.utils.env import DB -from geonature.core.gn_permissions.backoffice.forms import ( - CruvedScopeForm, - OtherPermissionsForm, - FilterForm, -) -from geonature.core.gn_permissions.tools import ( - cruved_scope_for_user_in_module, - beautifulize_cruved, -) -from geonature.core.gn_permissions.models import ( - TFilters, - BibFiltersType, - TActions, - CorRoleActionFilterModuleObject, - TObjects, - VUsersPermissions, -) -from geonature.core.users.models import CorRole -from pypnusershub.db.models import Organisme as BibOrganismes, Application -from geonature.core.gn_commons.models import TModules -from geonature.core.gn_permissions import decorators as permissions - - -from pypnusershub.db.models import User, AppRole - -routes = Blueprint("gn_permissions_backoffice", __name__, template_folder="templates") - - -@routes.route( - "cruved_form/module//role//object/", - methods=["GET", "POST"], -) -@routes.route("cruved_form/module//role/", methods=["GET", "POST"]) -@permissions.check_cruved_scope("R", object_code="PERMISSIONS") -def permission_form(id_module, id_role, id_object=None): - """ - .. :quickref: View_Permission; - """ - # TODO: check post permissions - form = None - module = DB.session.query(TModules).get(id_module) - object_instance = None - module_objects = [] - if id_object: - object_instance = DB.session.query(TObjects).get(id_object) - else: - object_instance = DB.session.query(TObjects).filter_by(code_object="ALL").first() - - user = DB.session.query(User).get(id_role) - if request.method == "GET": - cruved, (herited, herited_obj) = cruved_scope_for_user_in_module( - id_role=id_role, - module_code=module.module_code, - object_code=object_instance.code_object, - get_id=True, - get_herited_obj=True, - ) - form = CruvedScopeForm(**cruved) - # get the real cruved of user to set a warning - real_cruved = ( - DB.session.query(CorRoleActionFilterModuleObject) - .filter_by( - id_module=id_module, - id_role=id_role, - id_object=object_instance.id_object, - ) - .all() - ) - if len(real_cruved) == 0 and not module.module_code == "ADMIN": - msg_heritage_obj = "" - if herited_obj: - msg_heritage_obj = f" - objet {herited_obj[1]}" - if herited_obj[1] == "ALL": - msg_heritage_obj = "" - msg_heritage_obj = f" {herited_obj[0]} {msg_heritage_obj}" - flash( - f""" - Attention ce role n'a pas encore de CRUVED dans ce module. 
- Celui-ci lui est hérité de son groupe et/ou du module parent{msg_heritage_obj} - """ - ) - return render_template( - "cruved_scope_form.html", - form=form, - user=user, - module=module, - object_instance=object_instance, - id_object=id_object, - config=current_app.config, - ) - - else: - form = CruvedScopeForm(request.form) - if form.validate_on_submit(): - actions_id = { - action.code_action: action.id_action for action in DB.session.query(TActions).all() - } - for code_action, id_action in actions_id.items(): - privilege = { - "id_role": id_role, - "id_action": id_action, - "id_module": id_module, - "id_object": object_instance.id_object, - } - # check if a row already exist for a module, a role and an action - # force to not set several filter for the same role-action-module-object - permission_instance = ( - DB.session.query(CorRoleActionFilterModuleObject) - .filter_by(**privilege) - .join( - TFilters, - TFilters.id_filter == CorRoleActionFilterModuleObject.id_filter, - ) - .join( - BibFiltersType, - BibFiltersType.id_filter_type == TFilters.id_filter_type, - ) - .filter(BibFiltersType.code_filter_type == "SCOPE") - .first() - ) - # if already exist update the id_filter - if permission_instance: - permission_instance.id_filter = int(form.data[code_action]) - DB.session.merge(permission_instance) - else: - permission_row = CorRoleActionFilterModuleObject( - id_role=id_role, - id_action=id_action, - id_filter=int(form.data[code_action]), - id_module=id_module, - id_object=object_instance.id_object, - ) - DB.session.add(permission_row) - DB.session.commit() - flash("CRUVED mis à jour pour le role {}".format(user.id_role)) - return redirect(url_for("gn_permissions_backoffice.user_cruved", id_role=id_role)) - - -@routes.route("/users", methods=["GET"]) -@permissions.check_cruved_scope("R", get_scope=True, object_code="PERMISSIONS") -def users(scope): - """ - .. :quickref: View_Permission; - Render a list with all users with their number of cruved - Link to edit cruved and other permissions - Only display user which have profil in GeoNature and active user - """ - - id_app = ( - Application.query.filter_by(code_application=current_app.config["CODE_APPLICATION"]) - .one() - .id_application - ) - q = ( - DB.session.query(AppRole, func.count(CorRoleActionFilterModuleObject.id_role)) - .outerjoin( - CorRoleActionFilterModuleObject, - CorRoleActionFilterModuleObject.id_role == AppRole.id_role, - ) - .filter(AppRole.id_application == id_app) - .group_by(AppRole) - .order_by(AppRole.groupe.desc(), AppRole.nom_role.asc()) - ) - # filter with cruved auth - if scope == 2: - q = q.join( - BibOrganismes, BibOrganismes.id_organisme == g.current_user.id_organisme - ).filter(BibOrganismes == g.current_user.id_organisme) - elif scope == 1: - q = q.filter(User.id_role == g.current_user.id_role) - - data = q.all() - - users = [] - for user in data: - user_dict = user[0].as_dict() - user_dict["nb_cruved"] = user[1] - users.append(user_dict) - return render_template("users.html", users=users, config=current_app.config) - - -@routes.route("/user_cruved/", methods=["GET"]) -@permissions.check_cruved_scope( - "R", - object_code="PERMISSIONS", -) -def user_cruved(id_role): - """ - .. 
:quickref: View_Permission; - Get all scope CRUVED (with heritage) for a user in all modules - """ - user = DB.session.query(User).get(id_role).as_dict() - modules_data = ( - DB.session.query(TModules) - .options(joinedload(TModules.objects)) - .order_by(TModules.module_order) - .all() - ) - groupes_data = DB.session.query(CorRole).filter(CorRole.id_role_utilisateur == id_role).all() - actions_label = {} - for action in DB.session.query(TActions).all(): - actions_label[action.code_action] = action.description_action - modules = [] - for module in modules_data: - module = module.as_dict(depth=1) - # get cruved for all objects - module_objects_as_dict = [] - for _object in module.get("objects", []): - object_cruved, (herited, herited_obj) = cruved_scope_for_user_in_module( - id_role=id_role, - module_code=module["module_code"], - object_code=_object["code_object"], - get_herited_obj=True, - ) - _object["cruved"] = ( - beautifulize_cruved(actions_label, object_cruved), - herited, - herited_obj, - ) - module_objects_as_dict.append(_object) - - module["module_objects"] = module_objects_as_dict - - # do not display cruved for module which have objects - - cruved, (herited, herited_obj) = cruved_scope_for_user_in_module( - id_role, module["module_code"], get_herited_obj=True - ) - cruved_beautiful = beautifulize_cruved(actions_label, cruved) - module["module_cruved"] = (cruved_beautiful, herited, herited_obj) - modules.append(module) - return render_template( - "cruved_user.html", - user=user, - groupes=[groupe.role.as_dict() for groupe in groupes_data], - modules=modules, - config=current_app.config, - ) - - -@routes.route("/user_other_permissions/", methods=["GET"]) -@permissions.check_cruved_scope( - "R", - object_code="PERMISSIONS", -) -def user_other_permissions(id_role): - """ - .. :quickref: View_Permission; - Get all the permissions define for a user expect SCOPE permissions - """ - user = DB.session.query(User).get(id_role).as_dict() - - permissions = ( - DB.session.query(VUsersPermissions) - .filter(VUsersPermissions.code_filter_type != "SCOPE") - .filter(VUsersPermissions.id_role == id_role) - .order_by(VUsersPermissions.module_code, VUsersPermissions.code_filter_type) - .all() - ) - - filter_types = DB.session.query(BibFiltersType).filter( - BibFiltersType.code_filter_type != "SCOPE" - ) - - return render_template( - "user_other_permissions.html", - user=user, - filter_types=filter_types, - permissions=permissions, - ) - - -@routes.route( - "/other_permissions_form/id_permission//user//filter_type/", - methods=["GET", "POST"], -) -@routes.route( - "/other_permissions_form/user//filter_type/", - methods=["GET", "POST"], -) -@permissions.check_cruved_scope( - "R", - object_code="PERMISSIONS", -) -def other_permissions_form(id_role, id_filter_type, id_permission=None): - """ - Form to define permisisons for a user expect SCOPE permissions - .. 
:quickref: View_Permission; - """ - if id_permission: - perm = DB.session.query(CorRoleActionFilterModuleObject).get(id_permission) - form = OtherPermissionsForm( - id_filter_type, - action=perm.id_action, - filter=perm.id_filter, - module=perm.id_module, - ) - else: - form = OtherPermissionsForm(id_filter_type) - user = DB.session.query(User).get(id_role).as_dict() - filter_type = DB.session.query(BibFiltersType).get(id_filter_type) - - if request.method == "POST" and form.validate_on_submit(): - permInstance = CorRoleActionFilterModuleObject( - id_permission=id_permission, - id_role=id_role, - id_action=form.data["action"].id_action, - id_filter=int(form.data["filter"]), - id_module=form.data["module"].id_module, - ) - if id_permission: - DB.session.merge(permInstance) - else: - DB.session.add(permInstance) - DB.session.commit() - - return redirect( - url_for("gn_permissions_backoffice.user_other_permissions", id_role=id_role) - ) - - return render_template( - "other_permissions_form.html", user=user, form=form, filter_type=filter_type - ) - - -@routes.route( - "/filter_form/id_filter_type//id_filter/", - methods=["GET", "POST"], -) -@routes.route("/filter_form/id_filter_type/", methods=["GET", "POST"]) -@permissions.check_cruved_scope( - "R", - object_code="PERMISSIONS", -) -def filter_form(id_filter_type, id_filter=None): - """ - .. :quickref: View_Permission; - """ - # TODO: check post permissions - filter_type = DB.session.query(BibFiltersType).get(id_filter_type) - # if id_filter: its an edit, preload the form - if id_filter: - filter_value = DB.session.query(TFilters).get(id_filter).as_dict() - form = FilterForm(**filter_value) - else: - form = FilterForm() - if request.method == "POST" and form.validate_on_submit(): - filter_instance = TFilters( - id_filter=id_filter, - label_filter=form.data["label_filter"], - value_filter=form.data["value_filter"], - description_filter=form.data["description_filter"], - id_filter_type=id_filter_type, - ) - if id_filter: - DB.session.merge(filter_instance) - flash("Filtre édité avec succès") - else: - DB.session.add(filter_instance) - flash("Filtre ajouté avec succès") - DB.session.commit() - return redirect( - url_for("gn_permissions_backoffice.filter_list", id_filter_type=id_filter_type) - ) - return render_template( - "filter_form.html", form=form, filter_type=filter_type, id_filter=id_filter - ) - - -@routes.route("/filter_list/id_filter_type/", methods=["GET"]) -@permissions.check_cruved_scope( - "R", - object_code="PERMISSIONS", -) -def filter_list(id_filter_type): - """ - .. :quickref: View_Permission; - """ - filters = DB.session.query(TFilters).filter(TFilters.id_filter_type == id_filter_type) - filter_type = DB.session.query(BibFiltersType).get(id_filter_type) - return render_template("filter_list.html", filters=filters, filter_type=filter_type) - - -@routes.route("/filter/", methods=["POST"]) -@permissions.check_cruved_scope( - "D", - object_code="PERMISSIONS", -) -def delete_filter(id_filter): - """ - .. 
:quickref: View_Permission; - """ - my_filter = DB.session.query(TFilters).get_or_404(id_filter) - DB.session.delete(my_filter) - DB.session.commit() - flash("Filtre supprimé avec succès") - return redirect( - url_for( - "gn_permissions_backoffice.filter_list", - id_filter_type=my_filter.id_filter_type, - ) - ) diff --git a/backend/geonature/core/gn_permissions/commands.py b/backend/geonature/core/gn_permissions/commands.py new file mode 100644 index 0000000000..1d9c84e892 --- /dev/null +++ b/backend/geonature/core/gn_permissions/commands.py @@ -0,0 +1,74 @@ +import click +from click import UsageError +import sqlalchemy as sa +from sqlalchemy.orm import contains_eager, joinedload +from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound + +from pypnusershub.db.models import User + +from geonature.utils.env import db +from geonature.core.gn_permissions.models import Permission, PermissionAvailable + + +@click.command( + help="Ajouter des permissions administrateurs sur tous les modules pour un utilisateur ou un groupe." +) +@click.option("--id", "id_role", type=int) +@click.option("--nom", "nom_role") +@click.option("--prenom", "prenom_role") +@click.option("--group", "groupe", flag_value=True, default=None, help="Le rôle est un groupe.") +@click.option( + "--user", "groupe", flag_value=False, default=None, help="Le rôle est un utilisateur." +) +@click.option( + "--skip-existing", + is_flag=True, + help="Ne pas ajouter de permission administrateur s’il existe déjà une permission", +) +@click.option( + "--dry-run", + is_flag=True, + help="Uniquement afficher les permissions nécessaires, sans les ajouter en base", +) +@click.option( + "--yes", + is_flag=True, + help="Répond automatiquement oui à la confirmation", +) +def supergrant(skip_existing, dry_run, yes, **filters): + filters = {k: v for k, v in filters.items() if v is not None} + if not filters: + raise UsageError("Veuillez sélectionner le rôle à promouvoir.") + try: + role = User.query.filter_by(**filters).one() + except MultipleResultsFound: + raise UsageError("Plusieurs rôles correspondent à vos filtres, veuillez les affiner.") + except NoResultFound: + raise UsageError("Aucun rôle ne correspond à vos filtres, veuillez les corriger.") + if not yes: + if not click.confirm( + f"Ajouter les permissions administrateur au rôle {role.id_role} ({role.nom_complet}) ?", + ): + raise click.Abort() + for ap in PermissionAvailable.query.outerjoin( + Permission, sa.and_(PermissionAvailable.permissions, Permission.id_role == role.id_role) + ).options( + contains_eager(PermissionAvailable.permissions), + joinedload(PermissionAvailable.module), + joinedload(PermissionAvailable.object), + joinedload(PermissionAvailable.action), + ): + for perm in ap.permissions: + if skip_existing or not perm.filters: + break + else: + # The role does not have any permission of this type, + # or only permissions with at least one filter. + # We add an new permission without any filters. 
+ click.echo( + f"Nouvelle permission : module '{ap.module.module_code}', " + f"objet '{ap.object.code_object}', action '{ap.action.code_action}'" + ) + db.session.add(Permission(availability=ap, role=role)) + if not dry_run: + db.session.commit() diff --git a/backend/geonature/core/gn_permissions/decorators.py b/backend/geonature/core/gn_permissions/decorators.py index d9f95fb644..eb94666023 100644 --- a/backend/geonature/core/gn_permissions/decorators.py +++ b/backend/geonature/core/gn_permissions/decorators.py @@ -7,10 +7,16 @@ from flask import request, g from werkzeug.exceptions import Unauthorized, Forbidden -from geonature.core.gn_permissions.tools import ( - get_user_from_token_and_raise, - UserCruved, -) +from geonature.core.gn_permissions.tools import get_permissions, get_scopes_by_action + + +def _forbidden_message(action, module_code, object_code): + message = f"User {g.current_user.id_role} has no permissions to {action}" + if module_code: + message += f" in {module_code}" + if object_code: + message += f" on {object_code}" + return message def login_required(view_func): @@ -25,64 +31,57 @@ def decorated_view(*args, **kwargs): def check_cruved_scope( action, - get_role=False, module_code=None, object_code=None, - redirect_on_expiration=None, - redirect_on_invalid_token=None, *, get_scope=False, ): """ Decorator to protect routes with SCOPE CRUVED - The decorator first check if the user is connected and have a correct token (get_user_from_token_and_raise) + The decorator first checks that the user is connected, and then returns the user's max SCOPE permission for the requested action The decorator manages herited CRUVED from user's group and parent module (GeoNature) - Return a VUsersPermissions as kwargs of the decorated function as 'info_role' parameter Parameters: action(string): the requested action of the route <'C', 'R', 'U', 'V', 'E', 'D'> - get_role(boolean): is the decorator should retour the VUsersPermissions object as kwargs - module_code(string): the code of the module (gn_commons.t_modules) ('OCCTAX') for the requested permission - object_code(string): the code of the object (gn_permissions.t_object) for the requested permission ('PERMISSIONS') + module_code(string): the code of the module (gn_commons.t_modules) (e.g. 'OCCTAX') for the requested permission + object_code(string): the code of the object (gn_permissions.t_object) for the requested permission (e.g. 
'PERMISSIONS') + get_scope(boolean): whether the decorator should add the scope to the view kwargs """ - def _check_cruved_scope(fn): - @wraps(fn) - def __check_cruved_scope(*args, **kwargs): - user = get_user_from_token_and_raise( - request, redirect_on_expiration, redirect_on_invalid_token - ) - user_with_highter_perm = None - - user_with_highter_perm = UserCruved( - id_role=user["id_role"], - code_filter_type="SCOPE", - module_code=module_code, - object_code=object_code, - ).get_herited_user_cruved_by_action(action) - if user_with_highter_perm: - user_with_highter_perm = user_with_highter_perm[0] - - # if no perm or perm = 0 -> raise 403 - if user_with_highter_perm is None or user_with_highter_perm.value_filter == "0": - if object_code: - message = f"""User {user["id_role"]} cannot "{action}" in {module_code} on {object_code}""" - else: - message = f"""User {user["id_role"]} cannot "{action}" in {module_code}""" - raise Forbidden(description=message) - # if get_role = True : set info_role as kwargs - if get_role: - warn( - "'get_role' is deprecated, see https://github.com/PnX-SI/GeoNature/issues/2162", - DeprecationWarning, - ) - kwargs["info_role"] = user_with_highter_perm + def _check_cruved_scope(view_func): + @wraps(view_func) + def decorated_view(*args, **kwargs): + if g.current_user is None: + raise Unauthorized + scope = get_scopes_by_action(module_code=module_code, object_code=object_code)[action] + if not scope: + raise Forbidden(description=_forbidden_message(action, module_code, object_code)) if get_scope: - kwargs["scope"] = int(user_with_highter_perm.value_filter) - g.user = user_with_highter_perm - return fn(*args, **kwargs) + kwargs["scope"] = scope + return view_func(*args, **kwargs) - return __check_cruved_scope + return decorated_view return _check_cruved_scope + + +def permissions_required( + action, + module_code=None, + object_code=None, +): + def _permission_required(view_func): + @wraps(view_func) + def decorated_view(*args, **kwargs): + if g.current_user is None: + raise Unauthorized + permissions = get_permissions(action, module_code=module_code, object_code=object_code) + if not permissions: + raise Forbidden(description=_forbidden_message(action, module_code, object_code)) + kwargs["permissions"] = permissions + return view_func(*args, **kwargs) + + return decorated_view + + return _permission_required diff --git a/backend/geonature/core/gn_permissions/models.py b/backend/geonature/core/gn_permissions/models.py index 5655b6b1fc..237e95cce2 100644 --- a/backend/geonature/core/gn_permissions/models.py +++ b/backend/geonature/core/gn_permissions/models.py @@ -1,96 +1,74 @@ """ Models of gn_permissions schema """ +from packaging import version +import sqlalchemy as sa from sqlalchemy import ForeignKey from sqlalchemy.sql import select +from sqlalchemy.orm import foreign, joinedload, contains_eager +import flask_sqlalchemy + +if version.parse(flask_sqlalchemy.__version__) >= version.parse("3"): + from flask_sqlalchemy.query import Query +else: + from flask_sqlalchemy import BaseQuery as Query from utils_flask_sqla.serializers import serializable from pypnusershub.db.models import User -from geonature.utils.env import DB +from geonature.utils.env import db @serializable -class VUsersPermissions(DB.Model): - __tablename__ = "v_roles_permissions" - __table_args__ = {"schema": "gn_permissions"} - id_role = DB.Column(DB.Integer, primary_key=True) - nom_role = DB.Column(DB.Unicode) - prenom_role = DB.Column(DB.Unicode) - id_organisme = DB.Column(DB.Integer) - id_module = 
DB.Column(DB.Integer, primary_key=True) - module_code = DB.Column(DB.Unicode) - code_object = DB.Column(DB.Unicode) - id_action = DB.Column(DB.Integer, primary_key=True) - description_action = DB.Column(DB.Unicode) - id_filter = DB.Column(DB.Integer, primary_key=True) - label_filter = DB.Column(DB.Integer, primary_key=True) - code_action = DB.Column(DB.Unicode) - description_action = DB.Column(DB.Unicode) - value_filter = DB.Column(DB.Unicode) - code_filter_type = DB.Column(DB.Unicode) - id_filter_type = DB.Column(DB.Integer) - id_permission = DB.Column(DB.Integer) - - def __str__(self): - return """VUsersPermissions - role='{}' action='{}' filter='{}' module='{}' filter_type='{}' object='{} >""".format( - self.id_role, - self.code_action, - self.value_filter, - self.module_code, - self.code_filter_type, - self.code_object, - ) - - -@serializable -class BibFiltersType(DB.Model): +class PermFilterType(db.Model): __tablename__ = "bib_filters_type" __table_args__ = {"schema": "gn_permissions"} - id_filter_type = DB.Column(DB.Integer, primary_key=True) - code_filter_type = DB.Column(DB.Unicode) - label_filter_type = DB.Column(DB.Unicode) - description_filter_type = DB.Column(DB.Unicode) + id_filter_type = db.Column(db.Integer, primary_key=True) + code_filter_type = db.Column(db.Unicode) + label_filter_type = db.Column(db.Unicode) + description_filter_type = db.Column(db.Unicode) @serializable -class TFilters(DB.Model): - __tablename__ = "t_filters" +class PermScope(db.Model): + __tablename__ = "bib_filters_scope" __table_args__ = {"schema": "gn_permissions"} - id_filter = DB.Column(DB.Integer, primary_key=True) - value_filter = DB.Column(DB.Unicode) - label_filter = DB.Column(DB.Unicode) - description_filter = DB.Column(DB.Unicode) - id_filter_type = DB.Column(DB.Integer, ForeignKey(BibFiltersType.id_filter_type)) - filter_type = DB.relationship(BibFiltersType) + value = db.Column(db.Integer, primary_key=True) + label = db.Column(db.Unicode) + description = db.Column(db.Unicode) + + def __str__(self): + return self.description @serializable -class TActions(DB.Model): - __tablename__ = "t_actions" +class PermAction(db.Model): + __tablename__ = "bib_actions" __table_args__ = {"schema": "gn_permissions"} - id_action = DB.Column(DB.Integer, primary_key=True) - code_action = DB.Column(DB.Unicode) - description_action = DB.Column(DB.Unicode) + id_action = db.Column(db.Integer, primary_key=True) + code_action = db.Column(db.Unicode) + description_action = db.Column(db.Unicode) + + def __str__(self): + return self.description_action -cor_object_module = DB.Table( +cor_object_module = db.Table( "cor_object_module", - DB.Column( + db.Column( "id_cor_object_module", - DB.Integer, + db.Integer, primary_key=True, ), - DB.Column( + db.Column( "id_object", - DB.Integer, + db.Integer, ForeignKey("gn_permissions.t_objects.id_object"), ), - DB.Column( + db.Column( "id_module", - DB.Integer, + db.Integer, ForeignKey("gn_commons.t_modules.id_module"), ), schema="gn_permissions", @@ -98,78 +76,196 @@ class TActions(DB.Model): @serializable -class TObjects(DB.Model): +class PermObject(db.Model): __tablename__ = "t_objects" __table_args__ = {"schema": "gn_permissions"} - id_object = DB.Column(DB.Integer, primary_key=True) - code_object = DB.Column(DB.Unicode) - description_object = DB.Column(DB.Unicode) + id_object = db.Column(db.Integer, primary_key=True) + code_object = db.Column(db.Unicode) + description_object = db.Column(db.Unicode) def __str__(self): return f"{self.code_object} ({self.description_object})" 
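A minimal usage sketch of the renamed models above, assuming a Flask application context and a GeoNature database in which the standard CRUVED actions and the catch-all "ALL" object are present:

from geonature.core.gn_permissions.models import PermAction, PermObject

# Fetch the "Read" action and the catch-all object; both rows are assumed
# to exist in gn_permissions.bib_actions and gn_permissions.t_objects.
read_action = PermAction.query.filter_by(code_action="R").one()
all_object = PermObject.query.filter_by(code_object="ALL").one()
print(read_action, all_object)  # both classes define __str__ above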
-@serializable -class CorRoleActionFilterModuleObject(DB.Model): - __tablename__ = "cor_role_action_filter_module_object" +# compat. +TObjects = PermObject + + +def _nice_order(model, qs): + from geonature.core.gn_commons.models import TModules + + return ( + qs.join(model.module) + .join(model.object) + .join(model.action) + .options( + contains_eager(model.module), + contains_eager(model.object), + contains_eager(model.action), + ) + .order_by( + TModules.module_code, + # ensure ALL at first: + sa.case([(PermObject.code_object == "ALL", "1")], else_=PermObject.code_object), + model.id_action, + ) + ) + + +class PermissionAvailableQuery(Query): + def nice_order(self): + return _nice_order(PermissionAvailable, self) + + +class PermissionQuery(Query): + def nice_order(self): + return _nice_order(Permission, self) + + +class PermissionAvailable(db.Model): + __tablename__ = "t_permissions_available" __table_args__ = {"schema": "gn_permissions"} - id_permission = DB.Column(DB.Integer, primary_key=True) - id_role = DB.Column(DB.Integer, ForeignKey("utilisateurs.t_roles.id_role")) - id_action = DB.Column(DB.Integer, ForeignKey("gn_permissions.t_actions.id_action")) - id_filter = DB.Column(DB.Integer, ForeignKey("gn_permissions.t_filters.id_filter")) - id_module = DB.Column(DB.Integer, ForeignKey("gn_commons.t_modules.id_module")) - id_object = DB.Column( - DB.Integer, - ForeignKey("gn_permissions.t_objects.id_object"), - default=select([TObjects.id_object]).where(TObjects.code_object == "ALL"), + query_class = PermissionAvailableQuery + + id_module = db.Column( + db.Integer, ForeignKey("gn_commons.t_modules.id_module"), primary_key=True + ) + id_object = db.Column( + db.Integer, + ForeignKey(PermObject.id_object), + default=select([PermObject.id_object]).where(PermObject.code_object == "ALL"), + primary_key=True, ) + id_action = db.Column(db.Integer, ForeignKey(PermAction.id_action), primary_key=True) + label = db.Column(db.Unicode) + + module = db.relationship("TModules") + object = db.relationship(PermObject) + action = db.relationship(PermAction) + + scope_filter = db.Column(db.Boolean, server_default=sa.false()) + sensitivity_filter = db.Column(db.Boolean, server_default=sa.false(), nullable=False) + + filters_fields = { + "SCOPE": scope_filter, + "SENSITIVITY": sensitivity_filter, + } + + @property + def filters(self): + return [k for k, v in self.filters_fields.items() if getattr(self, v.name)] + + def __str__(self): + s = self.module.module_label + if self.object.code_object != "ALL": + object_label = self.object.code_object.title().replace("_", " ") + s += f" | {object_label}" + s += f" | {self.label}" + return s + + +class PermFilter: + def __init__(self, name, value): + self.name = name + self.value = value + + def __str__(self): + if self.name == "SCOPE": + if self.value is None: + return """ de tout le monde""" + elif self.value == 1: + return """ à moi""" + elif self.value == 2: + return """ de mon organisme""" + elif self.name == "SENSITIVITY": + if self.value: + return """ non sensible""" + else: + return """ sensible et non sensible""" - role = DB.relationship(User, primaryjoin=(User.id_role == id_role), foreign_keys=[id_role]) - action = DB.relationship( - TActions, - primaryjoin=(TActions.id_action == id_action), - foreign_keys=[id_action], +@serializable +class Permission(db.Model): + __tablename__ = "t_permissions" + __table_args__ = {"schema": "gn_permissions"} + query_class = PermissionQuery + + id_permission = db.Column(db.Integer, primary_key=True) + id_role = 
db.Column(db.Integer, ForeignKey("utilisateurs.t_roles.id_role")) + id_action = db.Column(db.Integer, ForeignKey(PermAction.id_action)) + id_module = db.Column(db.Integer, ForeignKey("gn_commons.t_modules.id_module")) + id_object = db.Column( + db.Integer, + ForeignKey(PermObject.id_object), + default=select([PermObject.id_object]).where(PermObject.code_object == "ALL"), ) - role = DB.relationship(User, primaryjoin=(User.id_role == id_role), foreign_keys=[id_role]) - action = DB.relationship( - TActions, - primaryjoin=(TActions.id_action == id_action), - foreign_keys=[id_action], + role = db.relationship(User, backref="permissions") + action = db.relationship(PermAction) + module = db.relationship("TModules") + object = db.relationship(PermObject) + + scope_value = db.Column(db.Integer, ForeignKey(PermScope.value), nullable=True) + scope = db.relationship(PermScope) + sensitivity_filter = db.Column(db.Boolean, server_default=sa.false(), nullable=False) + + availability = db.relationship( + PermissionAvailable, + primaryjoin=sa.and_( + foreign(id_module) == PermissionAvailable.id_module, + foreign(id_object) == PermissionAvailable.id_object, + foreign(id_action) == PermissionAvailable.id_action, + ), + backref="permissions", ) - filter = DB.relationship( - TFilters, - primaryjoin=(TFilters.id_filter == id_filter), - foreign_keys=[id_filter], + filters_fields = { + "SCOPE": scope_value, + "SENSITIVITY": sensitivity_filter, + } + + @staticmethod + def __SCOPE_le__(a, b): + return b is None or (a is not None and a <= b) - @staticmethod + def __SENSITIVITY_le__(a, b): + # False only if: a is False and b is True + return (not a) <= (not b) + + @staticmethod + def __default_le__(a, b): + return a == b or b is None + + def __le__(self, other): """ - Tell if a permission exist for a user, an action, a module and a filter_type - Return: - A CorRoleActionFilterModuleObject if exist or None + Return True if this permission is superseded by 'other' permission. + This requires all filters to be superseded by 'other' filters. 
""" - privilege = { - "id_role": id_role, - "id_action": id_action, - "id_module": id_module, - "id_object": id_object, - } - return ( - DB.session.query(CorRoleActionFilterModuleObject) - .filter_by(**privilege) - .join(TFilters, TFilters == CorRoleActionFilterModuleObject.id_filter) - .join(BibFiltersType, BibFiltersType.id_filter_type == TFilters.id_filter) - .filter(BibFiltersType.id_filter_type == id_filter_type) - .first() - ) + for name, field in self.filters_fields.items(): + # Get filter comparison function or use default comparison function + __le_fct__ = getattr(self, f"__{name}_le__", Permission.__default_le__) + self_value, other_value = getattr(self, field.name), getattr(other, field.name) + if not __le_fct__(self_value, other_value): + return False + return True - def __str__(self): - return ( - f"Permission(" - f"id_role={self.id_role}," - f"action={self.action}," - f"filter={self.filter}," - f"module={self.module}," - f"object={self.object})" - ) + @property + def filters(self): + filters = [] + for name, field in self.filters_fields.items(): + value = getattr(self, field.name) + if field.nullable: + if value is None: + continue + if field.type.python_type == bool: + if not value: + continue + filters.append(PermFilter(name, value)) + return filters + + def has_other_filters_than(self, *expected_filters): + for flt in self.filters: + if flt.name not in expected_filters: + return True + return False diff --git a/backend/geonature/core/gn_permissions/routes.py b/backend/geonature/core/gn_permissions/routes.py index 9641f6eab8..cedc92c582 100644 --- a/backend/geonature/core/gn_permissions/routes.py +++ b/backend/geonature/core/gn_permissions/routes.py @@ -12,10 +12,14 @@ from utils_flask_sqla.response import json_resp from geonature.core.gn_commons.models import TModules from geonature.core.gn_permissions import decorators as permissions -from geonature.core.gn_permissions.tools import cruved_scope_for_user_in_module +from geonature.core.gn_permissions.commands import supergrant -routes = Blueprint("gn_permissions", __name__) +routes = Blueprint( + "gn_permissions", __name__, cli_group="permissions", template_folder="./templates" +) + +routes.cli.add_command(supergrant) @routes.route("/logout_cruved", methods=["GET"]) diff --git a/backend/geonature/core/gn_permissions/templates/role_or_group_detail.html b/backend/geonature/core/gn_permissions/templates/role_or_group_detail.html new file mode 100644 index 0000000000..f29e813984 --- /dev/null +++ b/backend/geonature/core/gn_permissions/templates/role_or_group_detail.html @@ -0,0 +1,10 @@ +{% extends 'admin/model/details.html' %} + +{% block details_search %} + +{% endblock %} + + +{% block details_table %} + {{super()}} +{% endblock %} diff --git a/backend/geonature/core/gn_permissions/tools.py b/backend/geonature/core/gn_permissions/tools.py index 9f24cad0ba..659f5df50c 100644 --- a/backend/geonature/core/gn_permissions/tools.py +++ b/backend/geonature/core/gn_permissions/tools.py @@ -1,368 +1,157 @@ -import logging, json - -from flask import current_app, redirect, Response, g -from werkzeug.exceptions import Forbidden, Unauthorized -from werkzeug.routing import RequestRedirect -from authlib.jose.errors import ExpiredTokenError, JoseError +import logging +from itertools import groupby, permutations import sqlalchemy as sa -from sqlalchemy.sql.expression import func - - -from pypnusershub.db.tools import ( - AccessRightsExpiredError, - UnreadableAccessRightsError, - decode_token, +from sqlalchemy.orm import joinedload +from 
flask import g + +from geonature.core.gn_commons.models import TModules +from geonature.core.gn_permissions.models import ( + PermAction, + PermObject, + PermScope, + Permission, ) +from geonature.utils.env import db -from geonature.core.gn_permissions.models import VUsersPermissions, TFilters -from geonature.utils.env import DB - -log = logging.getLogger(__name__) +from pypnusershub.db.models import User +log = logging.getLogger() -def user_from_token(token, secret_key=None): - secret_key = secret_key or current_app.config["SECRET_KEY"] - try: - return decode_token(token) - except ExpiredTokenError: - raise AccessRightsExpiredError("Token expired") - except JoseError: - raise UnreadableAccessRightsError("Token BadSignature", 403) - - -def log_expiration_warning(): - log.warning( - """ - The parameter redirect_on_expiration will be soon removed. - The redirection will be default to GeoNature login page - """ +def _get_user_permissions(id_role): + return ( + Permission.query.options( + joinedload(Permission.module), + joinedload(Permission.object), + joinedload(Permission.action), + ) + .filter( + sa.or_( + # direct permissions + Permission.id_role == id_role, + # permissions through group + Permission.role.has(User.members.any(User.id_role == id_role)), + ), + ) + # remove duplicate permissions (defined at group and user level, or defined in several groups) + .order_by(Permission.id_module, Permission.id_object, Permission.id_action) + .distinct( + Permission.id_module, + Permission.id_object, + Permission.id_action, + *Permission.filters_fields.values(), + ) + .all() ) -def get_user_from_token_and_raise( - request, redirect_on_expiration=None, redirect_on_invalid_token=None -): - """ - Deserialize the token - catch excetpion and return appropriate Response(403, 302 ...) 
- """ - try: - token = request.cookies["token"] - return user_from_token(token) +def get_user_permissions(id_role=None): + if id_role is None: + id_role = g.current_user.id_role + if id_role not in g._permissions_by_user: + g._permissions_by_user[id_role] = _get_user_permissions(id_role) + return g._permissions_by_user[id_role] + + +def _get_permissions(id_role, module_code, object_code, action_code): + permissions = { + p + for p in get_user_permissions(id_role) + if p.module.module_code == module_code + and p.object.code_object == object_code + and p.action.code_action == action_code + } - except KeyError: - if redirect_on_expiration: - log_expiration_warning() - raise RequestRedirect(new_url=redirect_on_expiration) - raise Unauthorized(description="No token.") - except AccessRightsExpiredError: - if redirect_on_expiration: - log_expiration_warning() - raise RequestRedirect(new_url=redirect_on_expiration) - raise Unauthorized(description="Token expired.") - except UnreadableAccessRightsError: - if redirect_on_invalid_token: - log_expiration_warning() - raise RequestRedirect(new_url=redirect_on_invalid_token) - raise Unauthorized(description="Token corrupted.") - except Exception as e: - trap_all_exceptions = current_app.config.get("TRAP_ALL_EXCEPTIONS", True) - if not trap_all_exceptions: - raise - log.critical(e) - raise Unauthorized(description=repr(e)) + # Remove all permissions supersed by another permission + for p1, p2 in permutations(permissions, 2): + if p1 in permissions and p1 <= p2: + permissions.remove(p1) + return permissions -class UserCruved: - """ - Classe permettant de récupérer le cruved d'un utilisateur - pour un module et un objet données +def get_permissions(action_code, id_role=None, module_code=None, object_code=None): """ + This function returns a set of all the permissions that match (action_code, id_role, module_code, object_code). + If module_code is None, it is set as the code of the current module or as "GEONATURE" if no current module found. + If object_code is None, it is set as the code of the current object or as "ALL" if no current object found. 
- _main_module_code = "GEONATURE" - _main_object_code = "ALL" - _cruved_actions = ["C", "R", "U", "V", "E", "D"] - - def __init__( - self, id_role, code_filter_type, module_code=None, object_code=None, append_to_select=None - ): - - self._id_role = id_role - self._code_filter_type = code_filter_type - if module_code: - self._module_code = module_code - elif hasattr(g, "current_module"): - self._module_code = g.current_module.module_code + :returns : the set of permissions that match, or an empty set if no match + """ + if module_code is None: + if hasattr(g, "current_module"): + module_code = g.current_module.module_code else: - self._module_code = self._main_module_code - self._object_code = object_code - self._permission_select = self._build_permission_select_list(append_to_select) - - def _build_permission_select_list(self, append_to_select): - - # Construction de la liste des couples module_code, object_code - # a récupérer pour générer le cruved - # append_to_select => Ajout de selection pour complexifié l'héritage - permissions_select = { - 0: [self._module_code, self._object_code], - 10: [self._module_code, self._main_object_code], - 20: [self._main_module_code, self._object_code], - 30: [self._main_module_code, self._main_object_code], - } - - # append_to_select - if append_to_select: - permissions_select = {**permissions_select, **append_to_select} - - # filter null value - active_permissions_select = {k: v for k, v in permissions_select.items() if v[0] and v[1]} - - return active_permissions_select - - def _build_query_permission(self, code_action=None): - """ - Construction de la requete de récupération des permissions - Ordre de récupération - - code_objet et module_code - - ALL et module_code - - code_objet et GEONATURE - - ALL et GEONATURE - """ - q = VUsersPermissions.query.filter(VUsersPermissions.id_role == self._id_role).filter( - VUsersPermissions.code_filter_type == self._code_filter_type - ) - if code_action: - q = q.filter(VUsersPermissions.code_action == code_action) - - # Liste des couples module_code, object_code à sélectionner - ors = [] - for k, (module_code, object_code) in self._permission_select.items(): - ors.append( - sa.and_( - VUsersPermissions.module_code.ilike(module_code), - VUsersPermissions.code_object == object_code, - ) - ) - - return q.filter(sa.or_(*ors)).all() - - def get_user_perm_list(self, code_action=None): - return self._build_query_permission(code_action=code_action) + module_code = "GEONATURE" - def get_max_perm(self, perm_list): - """ - Return the max filter_code from a list of VUsersPermissions instance - get_max_perm return a list of VUsersPermissions from its group or himself - """ - user_with_highter_perm = perm_list[0] - max_code = user_with_highter_perm.value_filter - i = 1 - while i < len(perm_list): - if int(perm_list[i].value_filter) >= int(max_code): - max_code = perm_list[i].value_filter - user_with_highter_perm = perm_list[i] - i = i + 1 - return user_with_highter_perm - - def build_herited_user_cruved(self, user_permissions): - """ - Construction des permissions pour un utilisateur - pour une liste de permission données - - Parameters: - - user_permissions(list) - Return: - VUsersPermissions - herited - herited_object - """ - # loop on user permissions - # return the module permission if exist - # otherwise return GEONATURE permission - type_of_perm = {} - - # Liste des clés des paramètres de of select trié - permission_keys = sorted(self._permission_select) - - # filter the GeoNature perm and the module perm in two - 
arrays to make heritage - for user_permission in user_permissions: - for k, (module_code, object_code) in self._permission_select.items(): - if ( - user_permission.code_object == object_code - and user_permission.module_code == module_code - ): - type_of_perm.setdefault(k, []).append(user_permission) - - # take the max of the different permissions - herited = False - herited_object = None - for k in permission_keys: - if k in type_of_perm and len(type_of_perm[k]) > 0: - # Si la clé n'est pas le première de la liste - # Alors héritage - if k > permission_keys[0]: - herited = True - herited_object = self._permission_select[k] - return self.get_max_perm(type_of_perm[k]), herited, herited_object - - def get_herited_user_cruved(self): - user_permissions = self.get_user_perm_list() - return self.build_herited_user_cruved(user_permissions) + if object_code is None: + if hasattr(g, "current_object"): + object_code = g.current_object.code_object + else: + object_code = "ALL" - def get_perm_for_all_actions(self, get_id): - """ - Construction des permissions pour - chaque action d'un module/objet données + ident = (id_role, module_code, object_code, action_code) + if ident not in g._permissions: + g._permissions[ident] = _get_permissions(*ident) + return g._permissions[ident] - Parameters: - - get_id(boolean) : indique si la valeur de la permission retournée - correspond au code (False) ou à son identifiant (True) - Return: - - herited_cruved : valeur max de la permission pour chaque action du cruved - - herited(boolean) : True si hérité, False sinon - - herited_object((module_code, object_code)) : si herited - nom du module/objet pour lequel la valeur du cruved est retourné - """ - # Récupération de l'ensemble des permissions - user_permissions = self.get_user_perm_list() - perm_by_actions = {} +def get_scope(action_code, id_role=None, module_code=None, object_code=None, bypass_warning=False): + """ + This function gets the final scope permission. - # Pour chaque permission tri en fonction de son action - for perm in user_permissions: - perm_by_actions.setdefault(perm.code_action, []).append(perm) + It takes the maximum for all the permissions that match (action_code, id_role, module_code, object_code) and with - # Récupération de la valeur par défaut qui doit être retournée - if get_id: - default_value = ( - DB.session.query(TFilters.id_filter).filter(TFilters.value_filter == "0").one()[0] + a "SCOPE" filter type. + """ ) - select_col = "id_filter" + :returns : (int) The scope computed for specified arguments + """ + permissions = get_permissions(action_code, id_role, module_code, object_code) + max_scope = 0 + for permission in permissions: + if permission.has_other_filters_than("SCOPE") and not bypass_warning: + log.warning( + f"""WARNING : You are trying to get scope permission for a module ({module_code}) which implements other permission types. 
Please use get_permissions instead + """ ) + if permission.scope_value is None: + max_scope = 3 else: - default_value = "0" - select_col = "value_filter" - - herited_perm = {} # Liste des permissions du cruved - is_herited = False - g_herited_object = None - - # Pour chaque action construction des permissions - for action, perm in perm_by_actions.items(): - herited_perm[action], herited, herited_object = self.build_herited_user_cruved(perm) - if herited: - is_herited = True - g_herited_object = herited_object - - # Mise en forme - herited_cruved = {} - for action in self._cruved_actions: - if action in herited_perm: - herited_cruved[action] = getattr(herited_perm[action], select_col) - else: - herited_cruved[action] = default_value + max_scope = max(max_scope, permission.scope_value) + return max_scope - return herited_cruved, is_herited, g_herited_object - def get_herited_user_cruved_by_action(self, action): - """ - Récupération des permissions par action - """ - permissions = self._build_query_permission(action) - return self.build_herited_user_cruved(permissions) - - -def beautifulize_cruved(actions, cruved): - """ - Build more readable the cruved dict with the actions label - Params: - actions: dict action {'C': 'Action de créer'} - cruved: dict of cruved - Return: - Array [{'label': 'Action de Lire', 'value': '3'}] +def get_scopes_by_action(id_role=None, module_code=None, object_code=None): """ - cruved_beautiful = [] - for key, value in cruved.items(): - temp = {} - temp["label"] = actions.get(key) - temp["value"] = value - cruved_beautiful.append(temp) - return cruved_beautiful - + This function gets the scope permission for each one of the 6 actions in "CRUVED", + that match (id_role, module_code, object_code) -def cruved_scope_for_user_in_module( - id_role=None, - module_code=None, - object_code=None, - get_id=False, - get_herited_obj=False, - append_to_select=None, -): + :returns : (dict) A dict of the scope for each one of the 6 actions (the char in "CRUVED") """ - get the user cruved for a module or object - if no cruved for a module, the cruved parent module is taken - Child app cruved alway overright parent module cruved - Params: - - id_role(int) - - module_code(str) - - object_code(str) - - get_id(bool): if true return the id_scope for each action - if false return the filter_value for each action - - append_to_select (dict) : dict of extra select module object for heritage - Return a tuple - - index 0: the cruved as a dict : {'C': 0, 'R': 2 ...} - - index 1: a boolean which say if its an herited cruved - """ - herited_cruved, is_herited, herited_object = UserCruved( - id_role=id_role, - code_filter_type="SCOPE", - module_code=module_code, - object_code=object_code, - append_to_select=append_to_select, - ).get_perm_for_all_actions(get_id) - if get_herited_obj: - is_herited = (is_herited, herited_object) - return herited_cruved, is_herited - - -def _get_scopes_by_action(id_role, module_code, object_code): - cruved = UserCruved( - id_role=id_role, code_filter_type="SCOPE", module_code=module_code, object_code=object_code - ) return { - action: int(scope) - for action, scope in cruved.get_perm_for_all_actions(get_id=False)[0].items() + action_code: get_scope(action_code, id_role, module_code, object_code) + for action_code in "CRUVED" } -def get_scopes_by_action(id_role=None, module_code=None, object_code=None): - if id_role is None: - id_role = g.current_user.id_role - if "scopes_by_action" not in g: - g.scopes_by_action = dict() - key = (id_role, 
module_code, object_code) - if key not in g.scopes_by_action: - g.scopes_by_action[key] = _get_scopes_by_action(*key) - return g.scopes_by_action[key] +def has_any_permissions(action_code, id_role=None, module_code=None, object_code=None) -> bool: + """ + This function returns, as a boolean, whether any permission matches an action, a module and an object + Used by the frontend + """ + permissions = get_permissions(action_code, id_role, module_code, object_code) + return len(permissions) > 0 -def get_or_fetch_user_cruved(session=None, id_role=None, module_code=None, object_code=None): +def has_any_permissions_by_action(id_role=None, module_code=None, object_code=None): """ - Check if the cruved is in the session - if not, get the cruved from the DB with - cruved_for_user_in_app() + This function checks the permissions for each one of the 6 actions in "CRUVED", + that match (id_role, module_code, object_code) + + :returns : (dict) A dict of the boolean for each one of the 6 actions (the char in "CRUVED") """ - if module_code in session and "user_cruved" in session[module_code]: - # FIXME object_code is not checked! - return session[module_code]["user_cruved"] - else: - user_cruved = cruved_scope_for_user_in_module( - id_role=id_role, module_code=module_code, object_code=object_code - )[0] - session[module_code] = {} - session[module_code]["user_cruved"] = user_cruved - return user_cruved + return { + action_code: has_any_permissions(action_code, id_role, module_code, object_code) + for action_code in "CRUVED" + } diff --git a/backend/geonature/core/gn_profiles/routes.py b/backend/geonature/core/gn_profiles/routes.py index 930402fd40..59783582fd 100644 --- a/backend/geonature/core/gn_profiles/routes.py +++ b/backend/geonature/core/gn_profiles/routes.py @@ -16,6 +16,7 @@ VmValidProfiles, VConsistancyData, ) +import geonature.core.gn_profiles.tasks # noqa: F401 from geonature.utils.env import DB routes = Blueprint("gn_profiles", __name__, cli_group="profiles") diff --git a/backend/geonature/core/gn_profiles/tasks.py b/backend/geonature/core/gn_profiles/tasks.py new file mode 100644 index 0000000000..4d9d66c67a --- /dev/null +++ b/backend/geonature/core/gn_profiles/tasks.py @@ -0,0 +1,36 @@ +from sqlalchemy.sql import func +from celery.utils.log import get_task_logger +from celery.schedules import crontab + +from geonature.utils.env import db +from geonature.utils.config import config +from geonature.utils.celery import celery_app + + +logger = get_task_logger(__name__) + + +@celery_app.on_after_finalize.connect +def setup_periodic_tasks(sender, **kwargs): + ct = config["PROFILES_REFRESH_CRONTAB"] + if ct: + minute, hour, day_of_month, month_of_year, day_of_week = ct.split(" ") + sender.add_periodic_task( + crontab( + minute=minute, + hour=hour, + day_of_week=day_of_week, + day_of_month=day_of_month, + month_of_year=month_of_year, + ), + refresh_profiles.s(), + name="refresh profiles", + ) + + +@celery_app.task(bind=True) +def refresh_profiles(self): + logger.info("Refresh profiles...") + db.session.execute(func.gn_profiles.refresh_profiles()) + db.session.commit() + logger.info("Profiles refreshed.") diff --git a/backend/geonature/core/gn_synthese/models.py b/backend/geonature/core/gn_synthese/models.py index 76991b5919..14b9f0d347 100644 --- a/backend/geonature/core/gn_synthese/models.py +++ b/backend/geonature/core/gn_synthese/models.py @@ -1,8 +1,10 @@ from collections import OrderedDict +from packaging import version +from typing import List import sqlalchemy as sa import datetime -from 
sqlalchemy import ForeignKey +from sqlalchemy import ForeignKey, Unicode, and_, DateTime from sqlalchemy.orm import ( relationship, column_property, @@ -12,15 +14,22 @@ deferred, ) from sqlalchemy.sql import select, func, exists +from sqlalchemy.schema import FetchedValue from sqlalchemy.dialects.postgresql import UUID, JSONB from geoalchemy2 import Geometry from geoalchemy2.shape import to_shape from geojson import Feature from flask import g -from flask_sqlalchemy import BaseQuery +import flask_sqlalchemy + +if version.parse(flask_sqlalchemy.__version__) >= version.parse("3"): + from flask_sqlalchemy.query import Query +else: + from flask_sqlalchemy import BaseQuery as Query from werkzeug.exceptions import NotFound +from werkzeug.datastructures import MultiDict from pypnnomenclature.models import TNomenclatures from pypnusershub.db.models import User @@ -94,7 +103,79 @@ class CorObserverSynthese(DB.Model): ) -class SyntheseQuery(GeoFeatureCollectionMixin, BaseQuery): +class SyntheseLogEntryQuery(Query): + sortable_columns = ["meta_last_action_date"] + filterable_columns = ["id_synthese", "last_action", "meta_last_action_date"] + + def filter_by_params(self, params): + for col in self.filterable_columns: + if col not in params: + continue + column = getattr(SyntheseLogEntry, col) + for value in params.getlist(col): + if isinstance(column.type, DateTime): + self = self.filter_by_datetime(column, value) + elif isinstance(column.type, Unicode): + self = self.filter(column.ilike(f"%{value}%")) + else: + self = self.filter(column == value) + return self + + def filter_by_datetime(self, col, dt: str = None): + """Filter on a datetime column, with an optional comparison operator among "lt", "lte", "gt", "gte" and "eq" given as prefix of the value (e.g. "gte:2023-01-01") + + Parameters + ---------- + col : + the datetime column to filter on + dt : str + the value to compare to, optionally prefixed by an operator + + Returns + ------- + Query + + """ + + if ":" in dt: + operator, dt = dt.split(":", 1) + else: + operator = "eq" + dt = datetime.datetime.fromisoformat(dt) + if operator == "gt": + f = col > dt + elif operator == "gte": + f = col >= dt + elif operator == "lt": + f = col < dt + elif operator == "lte": + f = col <= dt + elif operator == "eq": + # FIXME: if datetime is at midnight (looks like date), allows all the day? + f = col == dt + else: + raise ValueError(f"Invalid comparison operator: {operator}") + return self.filter(f) + + def sort(self, columns: List[str]): + if not columns: + columns = ["meta_last_action_date"] + for col in columns: + if ":" in col: + col, direction = col.rsplit(":", 1) + if direction == "asc": + direction = sa.asc + elif direction == "desc": + direction = sa.desc + else: + raise ValueError(f"Invalid sort direction: {direction}") + else: + direction = sa.asc + if col not in self.sortable_columns: + raise ValueError(f"Invalid sort column: {col}") + self = self.order_by(direction(getattr(SyntheseLogEntry, col))) + return self + + +class SyntheseQuery(GeoFeatureCollectionMixin, Query): def join_nomenclatures(self): return self.options(*[joinedload(n) for n in Synthese.nomenclature_fields]) @@ -177,7 +258,7 @@ class Synthese(DB.Model): source = relationship(TSources) id_module = DB.Column(DB.Integer, ForeignKey(TModules.id_module)) module = DB.relationship(TModules) - entity_source_pk_value = DB.Column(DB.Integer) # FIXME varchar in db! 
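The SyntheseLogEntryQuery helpers above are meant to be chained from request-style parameters. A minimal sketch of how they can be driven, assuming a Flask application context (the parameter values are hypothetical):

from werkzeug.datastructures import MultiDict
from geonature.core.gn_synthese.models import SyntheseLogEntry

# Hypothetical query: deletion entries ("D") logged since 2023-01-01, newest first.
params = MultiDict([("last_action", "D"), ("meta_last_action_date", "gte:2023-01-01")])
entries = (
    SyntheseLogEntry.query  # SyntheseLogEntry sets query_class to SyntheseLogEntryQuery below
    .filter_by_params(params)
    .sort(["meta_last_action_date:desc"])
    .all()
)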
+ entity_source_pk_value = DB.Column(DB.Unicode) id_dataset = DB.Column(DB.Integer, ForeignKey(TDatasets.id_dataset)) dataset = DB.relationship(TDatasets, backref=DB.backref("synthese_records", lazy="dynamic")) grp_method = DB.Column(DB.Unicode(length=255)) @@ -188,35 +269,43 @@ class Synthese(DB.Model): nomenclature_geo_object_nature = db.relationship( TNomenclatures, foreign_keys=[id_nomenclature_geo_object_nature] ) - id_nomenclature_grp_typ = db.Column(db.Integer, ForeignKey(TNomenclatures.id_nomenclature)) + id_nomenclature_grp_typ = db.Column( + db.Integer, ForeignKey(TNomenclatures.id_nomenclature), server_default=FetchedValue() + ) nomenclature_grp_typ = db.relationship(TNomenclatures, foreign_keys=[id_nomenclature_grp_typ]) id_nomenclature_obs_technique = db.Column( - db.Integer, ForeignKey(TNomenclatures.id_nomenclature) + db.Integer, ForeignKey(TNomenclatures.id_nomenclature), server_default=FetchedValue() ) nomenclature_obs_technique = db.relationship( TNomenclatures, foreign_keys=[id_nomenclature_obs_technique] ) - id_nomenclature_bio_status = db.Column(db.Integer, ForeignKey(TNomenclatures.id_nomenclature)) + id_nomenclature_bio_status = db.Column( + db.Integer, ForeignKey(TNomenclatures.id_nomenclature), server_default=FetchedValue() + ) nomenclature_bio_status = db.relationship( TNomenclatures, foreign_keys=[id_nomenclature_bio_status] ) id_nomenclature_bio_condition = db.Column( - db.Integer, ForeignKey(TNomenclatures.id_nomenclature) + db.Integer, ForeignKey(TNomenclatures.id_nomenclature), server_default=FetchedValue() ) nomenclature_bio_condition = db.relationship( TNomenclatures, foreign_keys=[id_nomenclature_bio_condition] ) - id_nomenclature_naturalness = db.Column(db.Integer, ForeignKey(TNomenclatures.id_nomenclature)) + id_nomenclature_naturalness = db.Column( + db.Integer, ForeignKey(TNomenclatures.id_nomenclature), server_default=FetchedValue() + ) nomenclature_naturalness = db.relationship( TNomenclatures, foreign_keys=[id_nomenclature_naturalness] ) id_nomenclature_valid_status = db.Column( - db.Integer, ForeignKey(TNomenclatures.id_nomenclature) + db.Integer, ForeignKey(TNomenclatures.id_nomenclature), server_default=FetchedValue() ) nomenclature_valid_status = db.relationship( TNomenclatures, foreign_keys=[id_nomenclature_valid_status] ) - id_nomenclature_exist_proof = db.Column(db.Integer, ForeignKey(TNomenclatures.id_nomenclature)) + id_nomenclature_exist_proof = db.Column( + db.Integer, ForeignKey(TNomenclatures.id_nomenclature), server_default=FetchedValue() + ) nomenclature_exist_proof = db.relationship( TNomenclatures, foreign_keys=[id_nomenclature_exist_proof] ) @@ -226,17 +315,25 @@ class Synthese(DB.Model): nomenclature_diffusion_level = db.relationship( TNomenclatures, foreign_keys=[id_nomenclature_diffusion_level] ) - id_nomenclature_life_stage = db.Column(db.Integer, ForeignKey(TNomenclatures.id_nomenclature)) + id_nomenclature_life_stage = db.Column( + db.Integer, ForeignKey(TNomenclatures.id_nomenclature), server_default=FetchedValue() + ) nomenclature_life_stage = db.relationship( TNomenclatures, foreign_keys=[id_nomenclature_life_stage] ) - id_nomenclature_sex = db.Column(db.Integer, ForeignKey(TNomenclatures.id_nomenclature)) + id_nomenclature_sex = db.Column( + db.Integer, ForeignKey(TNomenclatures.id_nomenclature), server_default=FetchedValue() + ) nomenclature_sex = db.relationship(TNomenclatures, foreign_keys=[id_nomenclature_sex]) - id_nomenclature_obj_count = db.Column(db.Integer, ForeignKey(TNomenclatures.id_nomenclature)) + 
id_nomenclature_obj_count = db.Column( + db.Integer, ForeignKey(TNomenclatures.id_nomenclature), server_default=FetchedValue() + ) nomenclature_obj_count = db.relationship( TNomenclatures, foreign_keys=[id_nomenclature_obj_count] ) - id_nomenclature_type_count = db.Column(db.Integer, ForeignKey(TNomenclatures.id_nomenclature)) + id_nomenclature_type_count = db.Column( + db.Integer, ForeignKey(TNomenclatures.id_nomenclature), server_default=FetchedValue() + ) nomenclature_type_count = db.relationship( TNomenclatures, foreign_keys=[id_nomenclature_type_count] ) @@ -245,39 +342,44 @@ class Synthese(DB.Model): TNomenclatures, foreign_keys=[id_nomenclature_sensitivity] ) id_nomenclature_observation_status = db.Column( - db.Integer, ForeignKey(TNomenclatures.id_nomenclature) + db.Integer, ForeignKey(TNomenclatures.id_nomenclature), server_default=FetchedValue() ) nomenclature_observation_status = db.relationship( TNomenclatures, foreign_keys=[id_nomenclature_observation_status] ) - id_nomenclature_blurring = db.Column(db.Integer, ForeignKey(TNomenclatures.id_nomenclature)) + id_nomenclature_blurring = db.Column( + db.Integer, ForeignKey(TNomenclatures.id_nomenclature), server_default=FetchedValue() + ) nomenclature_blurring = db.relationship( TNomenclatures, foreign_keys=[id_nomenclature_blurring] ) id_nomenclature_source_status = db.Column( - db.Integer, ForeignKey(TNomenclatures.id_nomenclature) + db.Integer, ForeignKey(TNomenclatures.id_nomenclature), server_default=FetchedValue() ) nomenclature_source_status = db.relationship( - TNomenclatures, foreign_keys=[id_nomenclature_source_status] + TNomenclatures, + foreign_keys=[id_nomenclature_source_status], ) id_nomenclature_info_geo_type = db.Column( - db.Integer, ForeignKey(TNomenclatures.id_nomenclature) + db.Integer, ForeignKey(TNomenclatures.id_nomenclature), server_default=FetchedValue() ) nomenclature_info_geo_type = db.relationship( TNomenclatures, foreign_keys=[id_nomenclature_info_geo_type] ) - id_nomenclature_behaviour = db.Column(db.Integer, ForeignKey(TNomenclatures.id_nomenclature)) + id_nomenclature_behaviour = db.Column( + db.Integer, ForeignKey(TNomenclatures.id_nomenclature), server_default=FetchedValue() + ) nomenclature_behaviour = db.relationship( TNomenclatures, foreign_keys=[id_nomenclature_behaviour] ) id_nomenclature_biogeo_status = db.Column( - db.Integer, ForeignKey(TNomenclatures.id_nomenclature) + db.Integer, ForeignKey(TNomenclatures.id_nomenclature), server_default=FetchedValue() ) nomenclature_biogeo_status = db.relationship( TNomenclatures, foreign_keys=[id_nomenclature_biogeo_status] ) id_nomenclature_determination_method = db.Column( - db.Integer, ForeignKey(TNomenclatures.id_nomenclature) + db.Integer, ForeignKey(TNomenclatures.id_nomenclature), server_default=FetchedValue() ) nomenclature_determination_method = db.relationship( TNomenclatures, foreign_keys=[id_nomenclature_determination_method] @@ -318,8 +420,8 @@ class Synthese(DB.Model): comment_description = DB.Column(DB.UnicodeText) additional_data = DB.Column(JSONB) meta_validation_date = DB.Column(DB.DateTime) - meta_create_date = DB.Column(DB.DateTime) - meta_update_date = DB.Column(DB.DateTime) + meta_create_date = DB.Column(DB.DateTime, server_default=FetchedValue()) + meta_update_date = DB.Column(DB.DateTime, server_default=FetchedValue()) last_action = DB.Column(DB.Unicode) areas = relationship(LAreas, secondary=corAreaSynthese, backref="synthese_obs") @@ -332,7 +434,7 @@ class Synthese(DB.Model): cor_observers = DB.relationship(User, 
secondary=cor_observer_synthese) - def has_instance_permission(self, scope): + def _has_scope_grant(self, scope): if scope == 0: return False elif scope in (1, 2): @@ -344,6 +446,31 @@ def has_instance_permission(self, scope): elif scope == 3: return True + def _has_permissions_grant(self, permissions): + if not permissions: + return False + for perm in permissions: + if perm.has_other_filters_than("SCOPE", "SENSITIVITY"): + continue # unsupported filters + if perm.sensitivity_filter and self.nomenclature_sensitivity.cd_nomenclature != "0": + continue # sensitivity filter denied access, check next permission + if perm.scope_value: + if g.current_user == self.digitiser: + return True + if g.current_user in self.cor_observers: + return True + if self.dataset.has_instance_permission(perm.scope_value): + return True + continue # scope filter denied access, check next permission + return True # no filter excludes this permission + return False + + def has_instance_permission(self, permissions): + if type(permissions) == int: + return self._has_scope_grant(permissions) + else: + return self._has_permissions_grant(permissions) + @serializable class DefaultsNomenclaturesValue(DB.Model): @@ -369,6 +496,7 @@ class BibReportsTypes(DB.Model): @serializable class TReport(DB.Model): __tablename__ = "t_reports" + __table_args__ = {"schema": "gn_synthese"} id_report = DB.Column(DB.Integer(), primary_key=True) id_synthese = DB.Column(DB.Integer(), ForeignKey("gn_synthese.synthese.id_synthese")) @@ -539,6 +667,24 @@ class VColorAreaTaxon(DB.Model): color = DB.Column(DB.Unicode()) +@serializable +class SyntheseLogEntry(DB.Model): + """Log synthese table, populated with Delete Triggers on gn_synthese.synthese + Parameters + ---------- + DB: + Flask SQLAlchemy controller + """ + + __tablename__ = "t_log_synthese" + __table_args__ = {"schema": "gn_synthese"} + query_class = SyntheseLogEntryQuery + + id_synthese = DB.Column(DB.Integer(), primary_key=True) + last_action = DB.Column(DB.String(length=1)) + meta_last_action_date = DB.Column(DB.DateTime) + + # defined here to avoid circular dependencies source_subquery = ( select([TSources.id_source, Synthese.id_dataset]) @@ -553,3 +699,10 @@ class VColorAreaTaxon(DB.Model): secondary=source_subquery, viewonly=True, ) +TDatasets.synthese_records_count = column_property( + select([func.count(Synthese.id_synthese)]) + .where(Synthese.id_dataset == TDatasets.id_dataset) + .as_scalar() # deprecated, replace with scalar_subquery() + .label("synthese_records_count"), + deferred=True, +) diff --git a/backend/geonature/core/gn_synthese/routes.py b/backend/geonature/core/gn_synthese/routes.py index 68ae774025..edf04e4722 100644 --- a/backend/geonature/core/gn_synthese/routes.py +++ b/backend/geonature/core/gn_synthese/routes.py @@ -1,6 +1,5 @@ import json import datetime -import time import re from collections import OrderedDict from warnings import warn @@ -16,21 +15,24 @@ g, ) from werkzeug.exceptions import Forbidden, NotFound, BadRequest, Conflict -from sqlalchemy import distinct, func, desc, asc, select, text, update -from sqlalchemy.orm import joinedload, contains_eager, lazyload, selectinload +from werkzeug.datastructures import MultiDict +from sqlalchemy import distinct, func, desc, asc, select, case +from sqlalchemy.orm import joinedload, lazyload, selectinload from geojson import FeatureCollection, Feature import sqlalchemy as sa +from sqlalchemy.orm import load_only, aliased, Load from utils_flask_sqla.generic import serializeQuery, GenericTable from 
utils_flask_sqla.response import to_csv_resp, to_json_resp, json_resp from utils_flask_sqla_geo.generic import GenericTableGeo from geonature.utils import filemanager -from geonature.utils.env import DB +from geonature.utils.env import db, DB from geonature.utils.errors import GeonatureApiError from geonature.utils.utilsgeometrytools import export_as_geo_file from geonature.core.gn_meta.models import TDatasets +from geonature.core.notifications.utils import dispatch_notifications from geonature.core.gn_synthese.models import ( BibReportsTypes, @@ -41,16 +43,15 @@ VSyntheseForWebApp, VColorAreaTaxon, TReport, + SyntheseLogEntry, ) from geonature.core.gn_synthese.synthese_config import MANDATORY_COLUMNS from geonature.core.gn_synthese.utils.query_select_sqla import SyntheseQuery from geonature.core.gn_permissions import decorators as permissions -from geonature.core.gn_permissions.tools import ( - cruved_scope_for_user_in_module, - get_scopes_by_action, -) +from geonature.core.gn_permissions.decorators import login_required, permissions_required +from geonature.core.gn_permissions.tools import get_scopes_by_action, get_permissions from ref_geo.models import LAreas, BibAreasTypes @@ -76,8 +77,8 @@ @routes.route("/for_web", methods=["GET", "POST"]) -@permissions.check_cruved_scope("R", True, module_code="SYNTHESE") -def get_observations_for_web(info_role): +@permissions_required("R", module_code="SYNTHESE") +def get_observations_for_web(permissions): """Optimized route to serve data for the frontend with all filters. .. :quickref: Synthese; Get filtered observations @@ -100,7 +101,6 @@ def get_observations_for_web(info_role): geojson = json.loads(r["st_asgeojson"]) geojson["properties"] = properties - :param str info_role: Role used to get the associated filters, **TBC** :qparam str limit: Limit number of synthese returned. Defaults to NB_MAX_OBS_MAP. :qparam str cd_ref_parent: filtre tous les taxons enfants d'un TAXREF cd_ref. 
:qparam str cd_ref: Filter by TAXREF cd_ref attribute @@ -123,119 +123,135 @@ def get_observations_for_web(info_role): :>jsonarr int nb_total: Number of observations :>jsonarr bool nb_obs_limited: Is number of observations capped """ - if request.is_json: - filters = request.json - elif request.data: - #  decode byte to str - compat python 3.5 - filters = json.loads(request.data.decode("utf-8")) - else: - filters = {key: request.args.get(key) for key, value in request.args.items()} + filters = request.json if request.is_json else {} + if type(filters) != dict: + raise BadRequest("Bad filters") + result_limit = request.args.get( + "limit", current_app.config["SYNTHESE"]["NB_MAX_OBS_MAP"], type=int + ) + output_format = request.args.get("format", "ungrouped_geom") + if output_format not in ["ungrouped_geom", "grouped_geom", "grouped_geom_by_areas"]: + raise BadRequest(f"Bad format '{output_format}'") - if "limit" in filters: - result_limit = filters.pop("limit") - else: - result_limit = current_app.config["SYNTHESE"]["NB_MAX_OBS_MAP"] + # Build default CTE observations query + count_min_max = case( + [ + ( + VSyntheseForWebApp.count_min != VSyntheseForWebApp.count_max, + func.concat(VSyntheseForWebApp.count_min, " - ", VSyntheseForWebApp.count_max), + ), + (VSyntheseForWebApp.count_min != None, func.concat(VSyntheseForWebApp.count_min)), + ], + else_="", + ) - query = ( - select( - [ - VSyntheseForWebApp.id_synthese, - VSyntheseForWebApp.date_min, - VSyntheseForWebApp.lb_nom, - VSyntheseForWebApp.cd_nom, - VSyntheseForWebApp.nom_vern, - VSyntheseForWebApp.count_min, - VSyntheseForWebApp.count_max, - VSyntheseForWebApp.st_asgeojson, - VSyntheseForWebApp.observers, - VSyntheseForWebApp.dataset_name, - VSyntheseForWebApp.url_source, - VSyntheseForWebApp.entity_source_pk_value, - VSyntheseForWebApp.unique_id_sinp, - ] - ) + nom_vern_or_lb_nom = func.coalesce( + func.nullif(VSyntheseForWebApp.nom_vern, ""), VSyntheseForWebApp.lb_nom + ) + + columns = [ + "id", + VSyntheseForWebApp.id_synthese, + "date_min", + VSyntheseForWebApp.date_min, + "lb_nom", + VSyntheseForWebApp.lb_nom, + "cd_nom", + VSyntheseForWebApp.cd_nom, + "observers", + VSyntheseForWebApp.observers, + "dataset_name", + VSyntheseForWebApp.dataset_name, + "url_source", + VSyntheseForWebApp.url_source, + "unique_id_sinp", + VSyntheseForWebApp.unique_id_sinp, + "nom_vern_or_lb_nom", + nom_vern_or_lb_nom, + "count_min_max", + count_min_max, + "entity_source_pk_value", + VSyntheseForWebApp.entity_source_pk_value, + ] + observations = func.json_build_object(*columns).label("obs_as_json") + + obs_query = ( + # select([VSyntheseForWebApp.id_synthese, observations]) + select([observations]) .where(VSyntheseForWebApp.the_geom_4326.isnot(None)) .order_by(VSyntheseForWebApp.date_min.desc()) + .limit(result_limit) ) - synthese_query_class = SyntheseQuery(VSyntheseForWebApp, query, filters) - synthese_query_class.filter_query_all_filters(info_role) - result = DB.session.execute(synthese_query_class.query.limit(result_limit)) - geojson_features = [] - for r in result: - properties = { - "id": r["id_synthese"], - "date_min": str(r["date_min"]), - "cd_nom": r["cd_nom"], - "nom_vern_or_lb_nom": r["nom_vern"] if r["nom_vern"] else r["lb_nom"], - "lb_nom": r["lb_nom"], - "count_min_max": "{} - {}".format(r["count_min"], r["count_max"]) - if r["count_min"] != r["count_max"] - else str(r["count_min"] or ""), - "dataset_name": r["dataset_name"], - "observers": r["observers"], - "url_source": r["url_source"], - "unique_id_sinp": str(r["unique_id_sinp"]), - 
"entity_source_pk_value": r["entity_source_pk_value"], - } - geometry = json.loads(r["st_asgeojson"]) - geojson_features.append( - Feature( - geometry=geometry, - properties=properties, + + # Add filters to observations CTE query + synthese_query_class = SyntheseQuery( + VSyntheseForWebApp, + obs_query, + filters, + ) + synthese_query_class.filter_query_all_filters(g.current_user, permissions) + obs_query = synthese_query_class.query + + if output_format == "grouped_geom_by_areas": + # SQLAlchemy 1.4: replace column by add_columns + obs_query = obs_query.column(VSyntheseForWebApp.id_synthese).cte("OBS") + agg_areas = ( + select([CorAreaSynthese.id_synthese, LAreas.id_area]) + .select_from( + CorAreaSynthese.__table__.join( + LAreas, LAreas.id_area == CorAreaSynthese.id_area + ).join( + BibAreasTypes, + BibAreasTypes.id_type == LAreas.id_type, + ), + ) + .where(CorAreaSynthese.id_synthese == VSyntheseForWebApp.id_synthese) + .where( + BibAreasTypes.type_code == current_app.config["SYNTHESE"]["AREA_AGGREGATION_TYPE"] ) + .lateral("agg_areas") + ) + obs_query = ( + select([LAreas.geojson_4326.label("geojson"), obs_query.c.obs_as_json]) + .select_from( + obs_query.outerjoin( + agg_areas, agg_areas.c.id_synthese == obs_query.c.id_synthese + ).outerjoin(LAreas, LAreas.id_area == agg_areas.c.id_area) + ) + .cte("OBSERVATIONS") ) - return jsonify(FeatureCollection(geojson_features)) - - -@routes.route("", methods=["GET"]) -@permissions.check_cruved_scope("R", True, module_code="SYNTHESE") -@json_resp -def get_synthese(info_role): - """Return synthese row(s) filtered by form params. NOT USED ANY MORE FOR PERFORMANCE ISSUES - - .. :quickref: Synthese; Deprecated - - .. deprecated:: 2? - Use :route: /for_web instead - - Params must have same synthese fields names - - :parameter str info_role: Role used to get the associated filters - :returns dict[dict, int, bool]: See description above - """ - # change all args in a list of value - filters = {key: request.args.getlist(key) for key, value in request.args.items()} - if "limit" in filters: - result_limit = filters.pop("limit")[0] else: - result_limit = current_app.config["SYNTHESE"]["NB_MAX_OBS_MAP"] + # SQLAlchemy 1.4: replace column by add_columns + obs_query = obs_query.column(VSyntheseForWebApp.st_asgeojson.label("geojson")).cte( + "OBSERVATIONS" + ) - query = select([VSyntheseForWebApp]).order_by(VSyntheseForWebApp.date_min.desc()) - synthese_query_class = SyntheseQuery(VSyntheseForWebApp, query, filters) - synthese_query_class.filter_query_all_filters(info_role) - data = DB.engine.execute(synthese_query_class.query.limit(result_limit)) + if output_format == "ungrouped_geom": + query = select([obs_query.c.geojson, obs_query.c.obs_as_json]) + else: + # Group geometries with main query + grouped_properties = func.json_build_object( + "observations", func.json_agg(obs_query.c.obs_as_json).label("observations") + ) + query = select([obs_query.c.geojson, grouped_properties]).group_by(obs_query.c.geojson) - # q = synthese_query.filter_query_all_filters(VSyntheseForWebApp, q, filters, info_role) + results = DB.session.execute(query) - # data = q.limit(result_limit) - columns = current_app.config["SYNTHESE"]["COLUMNS_API_SYNTHESE_WEB_APP"] + MANDATORY_COLUMNS - features = [] - for d in data: - feature = d.as_geofeature(fields=columns) - feature["properties"]["nom_vern_or_lb_nom"] = ( - d.lb_nom if d.nom_vern is None else d.nom_vern + # Build final GeoJson + geojson_features = [] + for geom_as_geojson, properties in results: + geojson_features.append( + 
Feature(
+                geometry=json.loads(geom_as_geojson) if geom_as_geojson else None,
+                properties=properties,
+            )
         )
-        features.append(feature)
-    return {
-        "data": FeatureCollection(features),
-        "nb_obs_limited": len(features) == current_app.config["SYNTHESE"]["NB_MAX_OBS_MAP"],
-        "nb_total": len(features),
-    }
+    return jsonify(FeatureCollection(geojson_features))


 @routes.route("/vsynthese/<id_synthese>", methods=["GET"])
-@permissions.check_cruved_scope("R", get_scope=True, module_code="SYNTHESE")
-def get_one_synthese(scope, id_synthese):
+@permissions_required("R", module_code="SYNTHESE")
+def get_one_synthese(permissions, id_synthese):
     """Get one synthese record for web app with all decoded nomenclature"""
     synthese = Synthese.query.join_nomenclatures().options(
         joinedload("dataset").options(
@@ -294,7 +310,7 @@ def get_one_synthese(scope, id_synthese):

     synthese = synthese.get_or_404(id_synthese)

-    if not synthese.has_instance_permission(scope=scope):
+    if not synthese.has_instance_permission(permissions=permissions):
         raise Forbidden()

     geofeature = synthese.as_geofeature(fields=Synthese.nomenclature_fields + fields)
@@ -307,8 +323,8 @@ def get_one_synthese(scope, id_synthese):


 @routes.route("/export_taxons", methods=["POST"])
-@permissions.check_cruved_scope("E", True, module_code="SYNTHESE")
-def export_taxon_web(info_role):
+@permissions_required("E", module_code="SYNTHESE")
+def export_taxon_web(permissions):
     """Optimized route for taxon web export.

     .. :quickref: Synthese;
@@ -328,10 +344,10 @@ def export_taxon_web(info_role):
         engine=DB.engine,
     )
     columns = taxon_view.tableDef.columns
+    # Check the conformity of the view v_synthese_for_export_view
     try:
         assert hasattr(taxon_view.tableDef.columns, "cd_ref")
-
     except AssertionError as e:
         return (
             {
@@ -348,8 +364,6 @@ def export_taxon_web(info_role):

     id_list = request.get_json()

-    # check R and E CRUVED to know if we filter with cruved
-    cruved = cruved_scope_for_user_in_module(info_role.id_role, module_code="SYNTHESE")[0]
     sub_query = (
         select(
             [
@@ -369,15 +383,14 @@ def export_taxon_web(info_role):
         {},
     )

-    if cruved["R"] > cruved["E"]:
-        # filter on cruved
-        synthese_query_class.filter_query_with_cruved(info_role)
+    synthese_query_class.filter_query_all_filters(g.current_user, permissions)

     subq = synthese_query_class.query.alias("subq")

     q = DB.session.query(*columns, subq.c.nb_obs, subq.c.date_min, subq.c.date_max).join(
         subq, subq.c.cd_ref == columns.cd_ref
     )
+
     return to_csv_resp(
         datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S"),
         data=serializeQuery(q.all(), q.column_descriptions),
@@ -387,8 +400,8 @@ def export_taxon_web(info_role):


 @routes.route("/export_observations", methods=["POST"])
-@permissions.check_cruved_scope("E", True, module_code="SYNTHESE")
-def export_observations_web(info_role):
+@permissions_required("E", module_code="SYNTHESE")
+def export_observations_web(permissions):
     """Optimized route for observations web export.

     ..
:quickref: Synthese; @@ -402,13 +415,28 @@ def export_observations_web(info_role): """ params = request.args + # set default to csv export_format = params.get("export_format", "csv") # Test export_format if not export_format in current_app.config["SYNTHESE"]["EXPORT_FORMAT"]: raise BadRequest("Unsupported format") + # get list of id synthese from POST + id_list = request.get_json() + + # Get the SRID for the export srid = DB.session.execute(func.Find_SRID("gn_synthese", "synthese", "the_geom_local")).scalar() - # set default to csv + + # Get the CTE for synthese filtered by user permissions + synthese_query_class = SyntheseQuery( + Synthese, + select([Synthese.id_synthese]), + {}, + ) + synthese_query_class.filter_query_all_filters(g.current_user, permissions) + cte_synthese_filtered = synthese_query_class.build_query().cte("cte_synthese_filtered") + + # Get the view for export export_view = GenericTableGeo( tableName="v_synthese_for_export", schemaName="gn_synthese", @@ -417,8 +445,26 @@ def export_observations_web(info_role): srid=srid, ) - # get list of id synthese from POST - id_list = request.get_json() + # Get the query for export + export_query = ( + select([export_view.tableDef]) + .select_from( + export_view.tableDef.join( + cte_synthese_filtered, + cte_synthese_filtered.c.id_synthese == export_view.tableDef.c.id_synthese, + ) + ) + .where( + export_view.tableDef.columns[ + current_app.config["SYNTHESE"]["EXPORT_ID_SYNTHESE_COL"] + ].in_(id_list) + ) + ) + + # Get the results for export + results = DB.session.execute( + export_query.limit(current_app.config["SYNTHESE"]["NB_MAX_OBS_EXPORT"]) + ) db_cols_for_shape = [] columns_to_serialize = [] @@ -428,37 +474,12 @@ def export_observations_web(info_role): db_cols_for_shape.append(db_col) columns_to_serialize.append(db_col.key) - query = select([export_view.tableDef]).where( - export_view.tableDef.columns[current_app.config["SYNTHESE"]["EXPORT_ID_SYNTHESE_COL"]].in_( - id_list - ) - ) - synthese_query_class = SyntheseQuery( - export_view.tableDef, - query, - {}, - id_synthese_column=current_app.config["SYNTHESE"]["EXPORT_ID_SYNTHESE_COL"], - id_dataset_column=current_app.config["SYNTHESE"]["EXPORT_ID_DATASET_COL"], - observers_column=current_app.config["SYNTHESE"]["EXPORT_OBSERVERS_COL"], - id_digitiser_column=current_app.config["SYNTHESE"]["EXPORT_ID_DIGITISER_COL"], - with_generic_table=True, - ) - # check R and E CRUVED to know if we filter with cruved - cruved = cruved_scope_for_user_in_module(info_role.id_role, module_code="SYNTHESE")[0] - if cruved["R"] > cruved["E"]: - synthese_query_class.filter_query_with_cruved(info_role) - - results = DB.session.execute( - synthese_query_class.query.limit(current_app.config["SYNTHESE"]["NB_MAX_OBS_EXPORT"]) - ) - file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S") file_name = filemanager.removeDisallowedFilenameChars(file_name) if export_format == "csv": formated_data = [export_view.as_dict(d, columns=columns_to_serialize) for d in results] return to_csv_resp(file_name, formated_data, separator=";", columns=columns_to_serialize) - elif export_format == "geojson": features = [] for r in results: @@ -494,9 +515,10 @@ def export_observations_web(info_role): ) +# TODO: Change the following line to set method as "POST" only ? 
@routes.route("/export_metadata", methods=["GET", "POST"]) -@permissions.check_cruved_scope("E", True, module_code="SYNTHESE") -def export_metadata(info_role): +@permissions_required("E", module_code="SYNTHESE") +def export_metadata(permissions): """Route to export the metadata in CSV .. :quickref: Synthese; @@ -504,27 +526,41 @@ def export_metadata(info_role): The table synthese is join with gn_synthese.v_metadata_for_export The column jdd_id is mandatory in the view gn_synthese.v_metadata_for_export + TODO: Remove the following comment line ? or add the where clause for id_synthese in id_list ? POST parameters: Use a list of id_synthese (in POST parameters) to filter the v_synthese_for_export_view """ - if request.is_json: - filters = request.json - else: - filters = {key: request.args.getlist(key) for key, value in request.args.items()} + filters = request.json if request.is_json else {} metadata_view = GenericTable( - tableName="v_metadata_for_export", schemaName="gn_synthese", engine=DB.engine + tableName="v_metadata_for_export", + schemaName="gn_synthese", + engine=DB.engine, ) - q = DB.session.query(distinct(VSyntheseForWebApp.id_dataset), metadata_view.tableDef).join( - metadata_view.tableDef, - getattr( - metadata_view.tableDef.columns, - current_app.config["SYNTHESE"]["EXPORT_METADATA_ID_DATASET_COL"], + + # Test de conformité de la vue v_metadata_for_export + try: + assert hasattr(metadata_view.tableDef.columns, "jdd_id") + except AssertionError as e: + return ( + { + "msg": """ + View v_metadata_for_export + must have a jdd_id column \n + trace: {} + """.format( + str(e) + ) + }, + 500, ) - == VSyntheseForWebApp.id_dataset, - ) q = select([distinct(VSyntheseForWebApp.id_dataset), metadata_view.tableDef]) - synthese_query_class = SyntheseQuery(VSyntheseForWebApp, q, filters) + + synthese_query_class = SyntheseQuery( + VSyntheseForWebApp, + q, + filters, + ) synthese_query_class.add_join( metadata_view.tableDef, getattr( @@ -533,20 +569,32 @@ def export_metadata(info_role): ), VSyntheseForWebApp.id_dataset, ) - synthese_query_class.filter_query_all_filters(info_role) - data = DB.engine.execute(synthese_query_class.query) + # Filter query with permissions (scope, sensitivity, ...) + synthese_query_class.filter_query_all_filters(g.current_user, permissions) + + data = DB.session.execute(synthese_query_class.query) + + # Define the header of the csv file + columns = [db_col.key for db_col in metadata_view.tableDef.columns] + columns[columns.index("nombre_obs")] = "nombre_total_obs" + + # Retrieve the data to write in the csv file + data = [metadata_view.as_dict(d) for d in data] + for d in data: + d["nombre_total_obs"] = d.pop("nombre_obs") + return to_csv_resp( datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S"), - data=[metadata_view.as_dict(d) for d in data], + data=data, separator=";", - columns=[db_col.key for db_col in metadata_view.tableDef.columns], + columns=columns, ) @routes.route("/export_statuts", methods=["POST"]) -@permissions.check_cruved_scope("E", True, module_code="SYNTHESE") -def export_status(info_role): +@permissions_required("E", module_code="SYNTHESE") +def export_status(permissions): """Route to get all the protection status of a synthese search .. 
@@ -557,10 +605,7 @@ def export_status(info_role):
     Parameters:
         - HTTP-GET: the same as the /synthese endpoint (all the filters in the web app)
     """
-    if request.is_json:
-        filters = request.json
-    else:
-        filters = {key: request.args.getlist(key) for key, value in request.args.items()}
+    filters = request.json if request.is_json else {}

     # Initialize the select object
     q = select(
@@ -583,7 +628,8 @@ def export_status(info_role):

     # Initialize SyntheseQuery class
     synthese_query = SyntheseQuery(VSyntheseForWebApp, q, filters)
-    synthese_query.apply_all_filters(info_role)
+    # Filter query with permissions
+    synthese_query.filter_query_all_filters(g.current_user, permissions)

     # Add join
     synthese_query.add_join(Taxref, Taxref.cd_nom, VSyntheseForWebApp.cd_nom)
@@ -626,7 +672,7 @@ def export_status(info_role):
     q = q.where(TaxrefBdcStatutText.enable == True)

     protection_status = []
-    data = DB.engine.execute(q)
+    data = DB.session.execute(q)
     for d in data:
         row = OrderedDict(
             [
@@ -675,9 +721,9 @@ def export_status(info_role):


 @routes.route("/general_stats", methods=["GET"])
-@permissions.check_cruved_scope("R", True, module_code="SYNTHESE")
+@permissions_required("R", module_code="SYNTHESE")
 @json_resp
-def general_stats(info_role):
+def general_stats(permissions):
     """Return stats about synthese.

     .. :quickref: Synthese;
@@ -696,7 +742,7 @@ def general_stats(info_role):
         ]
     )
     synthese_query_obj = SyntheseQuery(Synthese, q, {})
-    synthese_query_obj.filter_query_with_cruved(info_role)
+    synthese_query_obj.filter_query_with_cruved(g.current_user, permissions)
     result = DB.session.execute(synthese_query_obj.query)
     synthese_counts = result.fetchone()
@@ -710,6 +756,7 @@ def general_stats(info_role):


 @routes.route("/taxons_tree", methods=["GET"])
+@login_required
 @json_resp
 def get_taxon_tree():
     """Get taxon tree.
@@ -724,6 +771,7 @@ def get_taxon_tree():


 @routes.route("/taxons_autocomplete", methods=["GET"])
+@login_required
 @json_resp
 def get_autocomplete_taxons_synthese():
     """Autocomplete taxon for web search (based on all taxon in Synthese).
@@ -740,7 +788,7 @@ def get_autocomplete_taxons_synthese():
     q = (
         DB.session.query(
             VMTaxrefListForautocomplete,
-            func.similarity(VMTaxrefListForautocomplete.search_name, search_name).label(
+            func.similarity(VMTaxrefListForautocomplete.unaccent_search_name, search_name).label(
                 "idx_trgm"
             ),
         )
@@ -748,7 +796,11 @@ def get_autocomplete_taxons_synthese():
         .join(Synthese, Synthese.cd_nom == VMTaxrefListForautocomplete.cd_nom)
     )
     search_name = search_name.replace(" ", "%")
-    q = q.filter(VMTaxrefListForautocomplete.search_name.ilike("%" + search_name + "%"))
+    q = q.filter(
+        VMTaxrefListForautocomplete.unaccent_search_name.ilike(
+            func.unaccent("%" + search_name + "%")
+        )
+    )
     regne = request.args.get("regne")
     if regne:
         q = q.filter(VMTaxrefListForautocomplete.regne == regne)
@@ -756,7 +808,6 @@ def get_autocomplete_taxons_synthese():
     group2_inpn = request.args.get("group2_inpn")
     if group2_inpn:
         q = q.filter(VMTaxrefListForautocomplete.group2_inpn == group2_inpn)
-    q = q.order_by(desc(VMTaxrefListForautocomplete.cd_nom == VMTaxrefListForautocomplete.cd_ref))
     limit = request.args.get("limit", 20)
     data = q.order_by(desc("idx_trgm")).limit(20).all()
@@ -764,6 +815,7 @@ def get_autocomplete_taxons_synthese():


 @routes.route("/sources", methods=["GET"])
+@login_required
 @json_resp
 def get_sources():
     """Get all sources.
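# For illustration only: a minimal, self-contained sketch of the
# accent-insensitive autocomplete pattern introduced above, assuming a
# PostgreSQL database with the pg_trgm and unaccent extensions enabled.
# `TaxonSearch` and its `unaccent_search_name` column are hypothetical
# stand-ins for VMTaxrefListForautocomplete.
from sqlalchemy import Column, Integer, Unicode, desc, func
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class TaxonSearch(Base):
    __tablename__ = "taxon_search"
    cd_nom = Column(Integer, primary_key=True)
    # pre-unaccented copy of the searchable name, so a trigram index stays usable
    unaccent_search_name = Column(Unicode)


def autocomplete(session: Session, search: str, limit: int = 20):
    # rank candidates by trigram similarity with the raw search string
    idx_trgm = func.similarity(TaxonSearch.unaccent_search_name, search).label("idx_trgm")
    # let words match in sequence: "mesange bl" -> "%mesange%bl%"
    pattern = "%" + search.replace(" ", "%") + "%"
    return (
        session.query(TaxonSearch, idx_trgm)
        # unaccent() the pattern so "mésange" and "mesange" behave alike
        .filter(TaxonSearch.unaccent_search_name.ilike(func.unaccent(pattern)))
        .order_by(desc("idx_trgm"))
        .limit(limit)
        .all()
    )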
@@ -776,6 +828,7 @@ def get_sources():


 @routes.route("/defaultsNomenclatures", methods=["GET"])
+@login_required
 def getDefaultsNomenclatures():
     """Get default nomenclatures
@@ -812,6 +865,7 @@ def getDefaultsNomenclatures():


 @routes.route("/color_taxon", methods=["GET"])
+@login_required
 def get_color_taxon():
     """Get color of taxon in areas (vue synthese.v_color_taxon_area).
@@ -855,6 +909,7 @@ def get_color_taxon():


 @routes.route("/taxa_count", methods=["GET"])
+@login_required
 @json_resp
 def get_taxa_count():
     """
@@ -881,6 +936,7 @@ def get_taxa_count():


 @routes.route("/observation_count", methods=["GET"])
+@login_required
 @json_resp
 def get_observation_count():
     """
@@ -909,9 +965,10 @@ def get_observation_count():


 @routes.route("/observations_bbox", methods=["GET"])
+@login_required
 def get_bbox():
     """
-    Get bbbox of observations
+    Get bbox of observations

     .. :quickref: Synthese;
@@ -939,8 +996,16 @@ def get_bbox():


 @routes.route("/observation_count_per_column/<column>", methods=["GET"])
+@login_required
 def observation_count_per_column(column):
-    """Get observations count group by a given column"""
+    """
+    Get observations count grouped by a given column
+
+    This function was used to count observations per dataset,
+    but this usage has been replaced by
+    TDatasets.synthese_records_count.
+    Remove this function as it is very inefficient?
+    """
     if column not in sa.inspect(Synthese).column_attrs:
         raise BadRequest(f"No column name {column} in Synthese")
     synthese_column = getattr(Synthese, column)
@@ -956,7 +1021,7 @@ def observation_count_per_column(column):


 @routes.route("/taxa_distribution", methods=["GET"])
-@json_resp
+@login_required
 def get_taxa_distribution():
     """
     Get taxa distribution for a given dataset or acquisition framework
@@ -996,13 +1061,13 @@ def get_taxa_distribution():
         query = query.filter(Synthese.id_source == id_source)

     data = query.group_by(rank).all()
-    return [{"count": d[0], "group": d[1]} for d in data]
+    return jsonify([{"count": d[0], "group": d[1]} for d in data])


 @routes.route("/reports", methods=["POST"])
-@permissions.check_cruved_scope("R", get_scope=True, module_code="SYNTHESE")
+@permissions_required("R", module_code="SYNTHESE")
 @json_resp
-def create_report(scope):
+def create_report(permissions):
     """
     Create a report (e.g report) for a given synthese id
@@ -1019,28 +1084,46 @@ def create_report(scope):
         type_name = data["type"]
         id_synthese = data["item"]
         content = data["content"]
-        if not id_synthese:
-            raise BadRequest("id_synthese is missing from the request")
-        if not type_name:
-            raise BadRequest("Report type is missing from the request")
-        if not content and type_name == "discussion":
-            raise BadRequest("Discussion content is required")
-        type_exists = BibReportsTypes.query.filter_by(type=type_name).first()
-        if not type_exists:
-            raise BadRequest("This report type does not exist")
-        synthese = Synthese.query.get_or_404(id_synthese)
-        if not synthese.has_instance_permission(scope):
-            raise Forbidden
-        # only allow one alert by id_synthese
-        if type_name in ["alert", "pin"]:
-            alert_exists = TReport.query.filter(
-                TReport.id_synthese == id_synthese,
-                TReport.report_type.has(BibReportsTypes.type == type_name),
-            ).one_or_none()
-            if alert_exists is not None:
-                raise Conflict("This type already exists for this id")
     except KeyError:
         raise BadRequest("Empty request data")
+    if not id_synthese:
+        raise BadRequest("id_synthese is missing from the request")
+    if not type_name:
+        raise BadRequest("Report type is missing from the request")
+    if not content and type_name == "discussion":
type_name == "discussion": + raise BadRequest("Discussion content is required") + type_exists = BibReportsTypes.query.filter_by(type=type_name).first() + if not type_exists: + raise BadRequest("This report type does not exist") + + synthese = Synthese.query.options( + Load(Synthese).raiseload("*"), + joinedload("cor_observers"), + joinedload("digitiser"), + joinedload("dataset"), + ).get_or_404(id_synthese) + if not synthese.has_instance_permission(permissions): + raise Forbidden + + report_query = TReport.query.filter( + TReport.id_synthese == id_synthese, + TReport.report_type.has(BibReportsTypes.type == type_name), + ) + + user_pin = TReport.query.filter( + TReport.id_synthese == id_synthese, + TReport.report_type.has(BibReportsTypes.type == "pin"), + TReport.id_role == g.current_user.id_role, + ) + # only allow one alert by id_synthese + if type_name in ["alert"]: + alert_exists = report_query.one_or_none() + if alert_exists is not None: + raise Conflict("This type already exists for this id") + if type_name in ["pin"]: + pin_exist = user_pin.one_or_none() + if pin_exist is not None: + raise Conflict("This type already exists for this id") new_entry = TReport( id_synthese=id_synthese, id_role=g.current_user.id_role, @@ -1049,11 +1132,45 @@ def create_report(scope): id_type=type_exists.id_type, ) session.add(new_entry) + + if type_name == "discussion": + # Get the observers of the observation + observers = {observer.id_role for observer in synthese.cor_observers} + # Get the users that commented the observation + commenters = { + report.id_role + for report in report_query.filter( + TReport.id_role.notin_({synthese.id_digitiser} | observers) + ).distinct(TReport.id_role) + } + # The id_roles are the Union between observers and commenters + id_roles = observers | commenters | {synthese.id_digitiser} + # Remove the user that just commented the obs not to notify him/her + id_roles.discard(g.current_user.id_role) + notify_new_report_change( + synthese=synthese, user=g.current_user, id_roles=id_roles, content=content + ) session.commit() +def notify_new_report_change(synthese, user, id_roles, content): + if not synthese.id_digitiser: + return + dispatch_notifications( + code_categories=["OBSERVATION-COMMENT"], + id_roles=id_roles, + title="Nouveau commentaire sur une observation", + url=( + current_app.config["URL_APPLICATION"] + + "/#/synthese/occurrence/" + + str(synthese.id_synthese) + ), + context={"synthese": synthese, "user": user, "content": content}, + ) + + @routes.route("/reports/", methods=["PUT"]) -@permissions.login_required +@login_required @json_resp def update_content_report(id_report): """ @@ -1075,14 +1192,14 @@ def update_content_report(id_report): @routes.route("/reports", methods=["GET"]) -@permissions.check_cruved_scope("R", get_scope=True, module_code="SYNTHESE") -def list_reports(scope): +@permissions_required("R", module_code="SYNTHESE") +def list_reports(permissions): type_name = request.args.get("type") id_synthese = request.args.get("idSynthese") sort = request.args.get("sort") # VERIFY ID SYNTHESE synthese = Synthese.query.get_or_404(id_synthese) - if not synthese.has_instance_permission(scope): + if not synthese.has_instance_permission(permissions): raise Forbidden # START REQUEST req = TReport.query.filter(TReport.id_synthese == id_synthese) @@ -1125,14 +1242,14 @@ def list_reports(scope): @routes.route("/reports/", methods=["DELETE"]) -@permissions.login_required +@login_required @json_resp def delete_report(id_report): reportItem = 
    reportItem = TReport.query.get_or_404(id_report)
     # alert control to check cruved - allow validators only
     if reportItem.report_type.type in ["alert"]:
-        scope = get_scopes_by_action(module_code="VALIDATION")["C"]
-        if not reportItem.synthese.has_instance_permission(scope):
+        permissions = get_permissions(module_code="SYNTHESE", action_code="R")
+        if not reportItem.synthese.has_instance_permission(permissions):
             raise Forbidden("Permission required to delete this report !")
     # only owner could delete a report for pin and discussion
     if reportItem.id_role != g.current_user.id_role and reportItem.report_type.type in [
@@ -1147,3 +1264,65 @@ def delete_report(id_report):
     else:
         DB.session.delete(reportItem)
     DB.session.commit()
+
+
+@routes.route("/log", methods=["GET"])
+@login_required
+def list_synthese_log_entries() -> dict:
+    """Get the log history (creations, updates, deletions) of synthese records
+
+    Returns
+    -------
+    dict
+        paginated list of log actions
+    """
+
+    deletion_entries = SyntheseLogEntry.query.options(
+        load_only(
+            SyntheseLogEntry.id_synthese,
+            SyntheseLogEntry.last_action,
+            SyntheseLogEntry.meta_last_action_date,
+        )
+    )
+    create_update_entries = Synthese.query.with_entities(
+        Synthese.id_synthese,
+        db.case(
+            [
+                (Synthese.meta_create_date < Synthese.meta_update_date, "U"),
+            ],
+            else_="I",
+        ).label("last_action"),
+        func.coalesce(Synthese.meta_update_date, Synthese.meta_create_date).label(
+            "meta_last_action_date"
+        ),
+    )
+    query = deletion_entries.union(create_update_entries)
+
+    # Filter
+    try:
+        query = query.filter_by_params(request.args)
+    except ValueError as exc:
+        raise BadRequest(*exc.args) from exc
+
+    # Sort
+    try:
+        query = query.sort(request.args.getlist("sort"))
+    except ValueError as exc:
+        raise BadRequest(*exc.args) from exc
+
+    # Paginate
+    limit = request.args.get("limit", type=int, default=50)
+    page = request.args.get("page", type=int, default=1)
+    results = query.paginate(page=page, per_page=limit, error_out=False)
+
+    return jsonify(
+        {
+            "items": [item.as_dict() for item in results.items],
+            "total": results.total,
+            "limit": limit,
+            "page": page,
+        }
+    )
diff --git a/backend/geonature/core/gn_synthese/utils/process.py b/backend/geonature/core/gn_synthese/utils/process.py
index 3581eb3396..1c1c7c7a51 100644
--- a/backend/geonature/core/gn_synthese/utils/process.py
+++ b/backend/geonature/core/gn_synthese/utils/process.py
@@ -13,7 +13,6 @@ def import_from_table(schema_name, table_name, field_name, value, limit=50):
     for all rows satisfying the condition : <field_name> = <value>
     """
     try:
-        # TODO get nb
         txt = """SELECT COUNT(*)
             FROM {}.{}
             WHERE {}::varchar = '{}'""".format(
             schema_name, table_name, field_name, value
@@ -26,7 +25,6 @@ def import_from_table(schema_name, table_name, field_name, value, limit=50):

     # on procède ici par boucle pour traiter un nombre raisonnable de donnée à la fois
     while limit * i < nb_data:
-
         txt = """SELECT gn_synthese.import_row_from_table(
             '{}',
             '{}',
diff --git a/backend/geonature/core/gn_synthese/utils/query_select_sqla.py b/backend/geonature/core/gn_synthese/utils/query_select_sqla.py
index 24b5f8365b..963bac7a89 100644
--- a/backend/geonature/core/gn_synthese/utils/query_select_sqla.py
+++ b/backend/geonature/core/gn_synthese/utils/query_select_sqla.py
@@ -1,20 +1,22 @@
 """
-Utility function to manage cruved and all filter of Synthese
+Utility functions to manage permissions and all the filters of Synthese
 Use these functions rather than query.py
 Filter the query of synthese using SQLA expression language and 'select' object
https://docs.sqlalchemy.org/en/latest/core/tutorial.html#selecting
 much more efficient
 """
 import datetime
+import unicodedata
 import uuid

 from flask import current_app
-from sqlalchemy import func, or_, and_, select
+import sqlalchemy as sa
+from sqlalchemy import func, or_, and_, select, distinct
 from sqlalchemy.sql import text
 from sqlalchemy.orm import aliased
-from shapely.wkt import loads
 from werkzeug.exceptions import BadRequest
+from shapely.geometry import shape
 from geoalchemy2.shape import from_shape

 from geonature.utils.env import DB
@@ -23,6 +25,7 @@
     CorAreaSynthese,
     BibReportsTypes,
     TReport,
+    TSources,
 )
 from geonature.core.gn_meta.models import (
     CorDatasetActor,
@@ -38,6 +41,9 @@
     TaxrefBdcStatutText,
     TaxrefBdcStatutValues,
 )
+from ref_geo.models import LAreas, BibAreasTypes
+from utils_flask_sqla_geo.schema import FeatureSchema, FeatureCollectionSchema
+from pypnnomenclature.models import TNomenclatures, BibNomenclaturesTypes


 class SyntheseQuery:
@@ -66,13 +72,6 @@ def __init__(
     ):
         self.query = query

-        # Passage de l'ensemble des filtres
-        # en array pour des questions de compatibilité
-        # TODO voir si ça ne peut pas être modifié
-        for k in filters.keys():
-            if not isinstance(filters[k], list):
-                filters[k] = [filters[k]]
-
         self.filters = filters
         self.first = query_joins is None
         self.model = model
@@ -129,11 +128,65 @@ def add_join_multiple_cond(self, right_table, conditions):
             # push the joined table in _already_joined_table list
             self._already_joined_table.append(right_table)

-    def filter_query_with_cruved(self, user):
+    def filter_query_with_permissions(self, user, permissions):
+        """
+        Filter the query with the permissions of a user
+        """
+        subquery_observers = (
+            select([CorObserverSynthese.id_synthese])
+            .select_from(CorObserverSynthese)
+            .where(CorObserverSynthese.id_role == user.id_role)
+        )
+        datasets_by_scope = {}  # to avoid fetching datasets several times for the same scope
+        permissions_filters = []
+        nomenclature_non_sensible = None
+        for perm in permissions:
+            if perm.has_other_filters_than("SCOPE", "SENSITIVITY"):
+                continue
+            perm_filters = []
+            if perm.sensitivity_filter:
+                if nomenclature_non_sensible is None:
+                    nomenclature_non_sensible = (
+                        TNomenclatures.query.filter(
+                            TNomenclatures.nomenclature_type.has(
+                                BibNomenclaturesTypes.mnemonique == "SENSIBILITE"
+                            )
+                        )
+                        .filter(TNomenclatures.cd_nomenclature == "0")
+                        .one()
+                    )
+                perm_filters.append(
+                    self.model.id_nomenclature_sensitivity
+                    == nomenclature_non_sensible.id_nomenclature
+                )
+            if perm.scope_value:
+                if perm.scope_value not in datasets_by_scope:
+                    datasets_by_scope[perm.scope_value] = [
+                        d.id_dataset
+                        for d in TDatasets.query.filter_by_scope(perm.scope_value).all()
+                    ]
+                datasets = datasets_by_scope[perm.scope_value]
+                scope_filters = [
+                    self.model_id_syn_col.in_(subquery_observers),  # user is observer
+                    self.model_id_digitiser_column == user.id_role,  # user is digitiser
+                    self.model_id_dataset_column.in_(
+                        datasets
+                    ),  # user is dataset (or parent af) actor
+                ]
+                perm_filters.append(or_(*scope_filters))
+            if perm_filters:
+                permissions_filters.append(and_(*perm_filters))
+            else:
+                permissions_filters.append(sa.true())
+        if permissions_filters:
+            self.query = self.query.where(or_(*permissions_filters))
+        else:
+            self.query = self.query.where(sa.false())
+
+    def filter_query_with_cruved(self, user, scope):
         """
         Filter the query with the cruved authorization of a user
         """
-        scope = int(user.value_filter)
         if scope in (1, 2):
             # get id synthese where user is observer
             subquery_observers = (
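# For illustration only: a distilled sketch of the permission combination
# performed by filter_query_with_permissions above. Each permission
# contributes the AND of its sensitivity/scope restrictions, and a row is
# visible as soon as ANY permission accepts it. `build_scope_filter` and
# `build_sensitivity_filter` are hypothetical callables standing in for the
# observer/digitiser/dataset and non-sensitive conditions built above.
import sqlalchemy as sa
from sqlalchemy import and_, or_


def permissions_where_clause(permissions, build_scope_filter, build_sensitivity_filter):
    branches = []
    for perm in permissions:
        conditions = []
        if perm.sensitivity_filter:
            # this permission only grants access to non-sensitive observations
            conditions.append(build_sensitivity_filter())
        if perm.scope_value:
            # this permission is limited to observations the user "owns"
            conditions.append(build_scope_filter(perm.scope_value))
        # an unrestricted permission grants access to every row
        branches.append(and_(*conditions) if conditions else sa.true())
    # with no permission at all, the query must return nothing
    return or_(*branches) if branches else sa.false()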
@@ -194,6 +247,9 @@ def filter_taxonomy(self): ) aliased_cor_taxon_attr = {} + protection_status_value = [] + red_list_filters = {} + for colname, value in self.filters.items(): if colname.startswith("taxhub_attribut"): self.add_join(Taxref, Taxref.cd_nom, self.model.cd_nom) @@ -216,43 +272,7 @@ def filter_taxonomy(self): red_list_cfg = next( (item for item in all_red_lists_cfg if item["id"] == red_list_id), None ) - red_list_cte = ( - select([TaxrefBdcStatutTaxon.cd_ref, bdc_statut_cor_text_area.c.id_area]) - .select_from( - TaxrefBdcStatutTaxon.__table__.join( - TaxrefBdcStatutCorTextValues, - TaxrefBdcStatutCorTextValues.id_value_text - == TaxrefBdcStatutTaxon.id_value_text, - ) - .join( - TaxrefBdcStatutText, - TaxrefBdcStatutText.id_text == TaxrefBdcStatutCorTextValues.id_text, - ) - .join( - TaxrefBdcStatutValues, - TaxrefBdcStatutValues.id_value - == TaxrefBdcStatutCorTextValues.id_value, - ) - .join( - bdc_statut_cor_text_area, - bdc_statut_cor_text_area.c.id_text == TaxrefBdcStatutText.id_text, - ) - ) - .where(TaxrefBdcStatutValues.code_statut.in_(value)) - .where(TaxrefBdcStatutText.cd_type_statut == red_list_cfg["status_type"]) - .where(TaxrefBdcStatutText.enable == True) - .cte(name=f"{red_list_id}_red_list") - ) - # cas_red_list = aliased(CorAreaSynthese) - self.add_join(CorAreaSynthese, CorAreaSynthese.id_synthese, self.model.id_synthese) - self.add_join(Taxref, Taxref.cd_nom, self.model.cd_nom) - self.add_join_multiple_cond( - red_list_cte, - [ - red_list_cte.c.cd_ref == Taxref.cd_ref, - red_list_cte.c.id_area == CorAreaSynthese.id_area, - ], - ) + red_list_filters[red_list_cfg["status_type"]] = value elif colname.endswith("_protection_status"): status_id = colname.replace("_protection_status", "") @@ -262,44 +282,16 @@ def filter_taxonomy(self): ) # Check if a checkbox was used. 
if (
-                    isinstance(value, list)
-                    and value[0] == True
+                    isinstance(value, bool)
+                    and value == True
                     and len(status_cfg["status_types"]) == 1
                 ):
                     value = status_cfg["status_types"]

-                status_cte = (
-                    select([TaxrefBdcStatutTaxon.cd_ref, bdc_statut_cor_text_area.c.id_area])
-                    .select_from(
-                        TaxrefBdcStatutTaxon.__table__.join(
-                            TaxrefBdcStatutCorTextValues,
-                            TaxrefBdcStatutCorTextValues.id_value_text
-                            == TaxrefBdcStatutTaxon.id_value_text,
-                        )
-                        .join(
-                            TaxrefBdcStatutText,
-                            TaxrefBdcStatutText.id_text == TaxrefBdcStatutCorTextValues.id_text,
-                        )
-                        .join(
-                            bdc_statut_cor_text_area,
-                            bdc_statut_cor_text_area.c.id_text == TaxrefBdcStatutText.id_text,
-                        )
-                    )
-                    .where(TaxrefBdcStatutText.cd_type_statut.in_(value))
-                    .where(TaxrefBdcStatutText.enable == True)
-                    .distinct()
-                    .cte(name=f"{status_id}_protection_status")
-                )
-                # cas_status = aliased(CorAreaSynthese)
-                self.add_join(CorAreaSynthese, CorAreaSynthese.id_synthese, self.model.id_synthese)
-                self.add_join(Taxref, Taxref.cd_nom, self.model.cd_nom)
-                self.add_join_multiple_cond(
-                    status_cte,
-                    [
-                        status_cte.c.cd_ref == Taxref.cd_ref,
-                        status_cte.c.id_area == CorAreaSynthese.id_area,
-                    ],
-                )
+                protection_status_value += value
+
+        if protection_status_value or red_list_filters:
+            self.build_bdc_status_pr_nb_lateral_join(protection_status_value, red_list_filters)
         # remove attributes taxhub from filters
         self.filters = {
             colname: value
@@ -307,32 +299,57 @@ def filter_taxonomy(self):
             if not colname.startswith("taxhub_attribut")
         }

-    def filter_other_filters(self):
+    def filter_other_filters(self, user):
         """
         Other filters
         """
         if "has_medias" in self.filters:
-            self.query = self.query.where(self.model.medias.any())
+            media_filter = self.model.medias.any()
+            if self.filters["has_medias"] is False:
+                media_filter = ~media_filter
+            self.query = self.query.where(media_filter)
         if "has_alert" in self.filters:
-            self.query = self.query.where(
-                self.model.reports.any(TReport.report_type.has(BibReportsTypes.type == "alert"))
+            alert_filter = self.model.reports.any(
+                TReport.report_type.has(BibReportsTypes.type == "alert")
             )
+            if self.filters["has_alert"] is False:
+                alert_filter = ~alert_filter
+            self.query = self.query.where(alert_filter)
         if "has_pin" in self.filters:
-            self.query = self.query.where(
-                self.model.reports.any(TReport.report_type.has(BibReportsTypes.type == "pin"))
+            pin_filter = self.model.reports.any(
+                and_(
+                    TReport.report_type.has(BibReportsTypes.type == "pin"),
+                    TReport.id_role == user.id_role,
+                )
             )
-
+            if self.filters["has_pin"] is False:
+                pin_filter = ~pin_filter
+            self.query = self.query.where(pin_filter)
+        if "has_comment" in self.filters:
+            comment_filter = self.model.reports.any(
+                TReport.report_type.has(BibReportsTypes.type == "discussion")
+            )
+            if self.filters["has_comment"] is False:
+                comment_filter = ~comment_filter
+            self.query = self.query.where(comment_filter)
         if "id_dataset" in self.filters:
             self.query = self.query.where(
                 self.model.id_dataset.in_(self.filters.pop("id_dataset"))
             )
         if "observers" in self.filters:
-            # découpe des éléments saisies par les espaces
-            observers = (self.filters.pop("observers")[0]).split()
+            # split the input elements on ","
+            observers = self.filters.pop("observers").split(",")
             self.query = self.query.where(
-                and_(*[self.model.observers.ilike("%" + observer + "%") for observer in observers])
+                or_(
+                    *[
+                        func.unaccent(self.model.observers).ilike(
+                            "%" + remove_accents(observer) + "%"
+                        )
+                        for observer in observers
+                    ]
+                )
             )
         if "observers_list" in self.filters:
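# For illustration only: the tri-state pattern shared by the has_medias,
# has_alert, has_pin and has_comment filters above. True keeps rows with at
# least one matching related record, False keeps rows with none, and an
# absent key leaves the query untouched. `query` is assumed to be a
# SQLAlchemy Select and `relationship_condition` something like
# `Synthese.medias.any()`.
def apply_has_filter(query, filters, key, relationship_condition):
    if key not in filters:
        return query  # filter not requested: no restriction
    condition = relationship_condition
    if filters[key] is False:
        condition = ~condition  # negate: keep rows WITHOUT related records
    return query.where(condition)


# e.g. apply_has_filter(query, filters, "has_medias", Synthese.medias.any())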
@@ -354,14 +371,18 @@ def filter_other_filters(self):
             formated_datasets = [d[0] for d in datasets]
             self.query = self.query.where(self.model.id_dataset.in_(formated_datasets))
         if "date_min" in self.filters:
-            self.query = self.query.where(self.model.date_min >= self.filters.pop("date_min")[0])
+            self.query = self.query.where(self.model.date_min >= self.filters.pop("date_min"))
         if "date_max" in self.filters:
             # set the date_max at 23h59 because an hour can be set in the timestamp
-            date_max = datetime.datetime.strptime(self.filters.pop("date_max")[0], "%Y-%m-%d")
+            date_max = datetime.datetime.strptime(self.filters.pop("date_max"), "%Y-%m-%d")
             date_max = date_max.replace(hour=23, minute=59, second=59)
             self.query = self.query.where(self.model.date_max <= date_max)

+        if "id_source" in self.filters:
+            self.add_join(TSources, self.model.id_source, TSources.id_source)
+            self.query = self.query.where(self.model.id_source == self.filters.pop("id_source"))
+
         if "id_acquisition_framework" in self.filters:
             if hasattr(self.model, "id_acquisition_framework"):
                 self.query = self.query.where(
@@ -379,32 +400,35 @@ def filter_other_filters(self):

         if "geoIntersection" in self.filters:
             # Intersect with the geom sent from the map
-            ors = []
-
-            for str_wkt in self.filters["geoIntersection"]:
+            geojson = self.filters["geoIntersection"]
+            if type(geojson) is not dict or "type" not in geojson:
+                raise BadRequest("geoIntersection is missing type")
+            if geojson["type"] == "Feature":
+                features = [FeatureSchema().load(geojson)]
+            elif geojson["type"] == "FeatureCollection":
+                features = FeatureCollectionSchema().load(geojson)["features"]
+            else:
+                raise BadRequest("Unsupported geoIntersection type")
+            geo_filters = []
+            for feature in features:
+                geom_wkb = from_shape(shape(feature["geometry"]), srid=4326)
                 # if the geom is a circle
-                if "radius" in self.filters:
-                    radius = self.filters.pop("radius")[0]
-                    wkt = loads(str_wkt)
-                    geom_wkb = from_shape(wkt, srid=4326)
-                    ors.append(
-                        func.ST_DWithin(
-                            func.ST_GeogFromWKB(self.model.the_geom_4326),
-                            func.ST_GeogFromWKB(geom_wkb),
-                            radius,
-                        ),
+                if "radius" in feature["properties"]:
+                    radius = feature["properties"]["radius"]
+                    geo_filter = func.ST_DWithin(
+                        func.ST_GeogFromWKB(self.model.the_geom_4326),
+                        func.ST_GeogFromWKB(geom_wkb),
+                        radius,
                     )
                 else:
-                    wkt = loads(str_wkt)
-                    geom_wkb = from_shape(wkt, srid=4326)
-                    ors.append(self.model.the_geom_4326.ST_Intersects(geom_wkb))
-
-            self.query = self.query.where(or_(*ors))
+                    geo_filter = self.model.the_geom_4326.ST_Intersects(geom_wkb)
+                geo_filters.append(geo_filter)
+            self.query = self.query.where(or_(*geo_filters))
             self.filters.pop("geoIntersection")

         if "period_start" in self.filters and "period_end" in self.filters:
-            period_start = self.filters.pop("period_start")[0]
-            period_end = self.filters.pop("period_end")[0]
+            period_start = self.filters.pop("period_start")
+            period_end = self.filters.pop("period_end")
             self.query = self.query.where(
                 or_(
                     func.gn_commons.is_in_period(
@@ -421,42 +445,53 @@ def filter_other_filters(self):
             )
         if "unique_id_sinp" in self.filters:
             try:
-                uuid_filter = uuid.UUID(self.filters.pop("unique_id_sinp")[0])
+                uuid_filter = uuid.UUID(self.filters.pop("unique_id_sinp"))
             except ValueError as e:
                 raise BadRequest(str(e))
             self.query = self.query.where(self.model.unique_id_sinp == uuid_filter)
         # generic filters
         for colname, value in self.filters.items():
             if colname.startswith("area"):
-                self.add_join(CorAreaSynthese, CorAreaSynthese.id_synthese, self.model.id_synthese)
-                self.query =
self.query.where(CorAreaSynthese.id_area.in_(value)) + cor_area_synthese_alias = aliased(CorAreaSynthese) + self.add_join( + cor_area_synthese_alias, + cor_area_synthese_alias.id_synthese, + self.model.id_synthese, + ) + self.query = self.query.where(cor_area_synthese_alias.id_area.in_(value)) elif colname.startswith("id_"): col = getattr(self.model.__table__.columns, colname) - self.query = self.query.where(col.in_(value)) + if isinstance(value, list): + self.query = self.query.where(col.in_(value)) + else: + self.query = self.query.where(col == value) elif hasattr(self.model.__table__.columns, colname): col = getattr(self.model.__table__.columns, colname) if str(col.type) == "INTEGER": if colname in ["precision"]: - self.query = self.query.where(col <= value[0]) + self.query = self.query.where(col <= value) else: - self.query = self.query.where(col == value[0]) + self.query = self.query.where(col == value) else: - self.query = self.query.where(col.ilike("%{}%".format(value[0]))) + self.query = self.query.where(col.ilike("%{}%".format(value))) - def apply_all_filters(self, user): - self.filter_query_with_cruved(user) + def apply_all_filters(self, user, permissions): + if type(permissions) == int: # scope + self.filter_query_with_cruved(user, scope=permissions) + else: + self.filter_query_with_permissions(user, permissions) self.filter_taxonomy() - self.filter_other_filters() + self.filter_other_filters(user) def build_query(self): if self.query_joins is not None: self.query = self.query.select_from(self.query_joins) return self.query - def filter_query_all_filters(self, user): + def filter_query_all_filters(self, user, permissions): """High level function to manage query with all filters. - Apply CRUVED, toxonomy and other filters. + Apply CRUVED, taxonomy and other filters. Parameters ---------- @@ -468,5 +503,107 @@ def filter_query_all_filters(self, user): sqlalchemy.orm.query.Query.filter Combined filter to apply. 
""" - self.apply_all_filters(user) + self.apply_all_filters(user, permissions) return self.build_query() + + def build_bdc_status_pr_nb_lateral_join(self, protection_status_value, red_list_filters): + """ + Create subquery for bdc_status filters + + Objectif : filtrer les données ayant : + - les statuts du type demandé par l'utilisateur + - les status s'appliquent bien sur la zone géographique de la donnée (c-a-d le département) + + Idée de façon à limiter le nombre de sous reqêtes, + la liste des status selectionnés par l'utilisateur s'appliquant à l'observation est + aggrégée de façon à tester le nombre puis jointé sur le département de la donnée + """ + # Ajout de la table taxref si non ajouté + self.add_join(Taxref, Taxref.cd_nom, self.model.cd_nom) + + # Ajout jointure permettant d'avoir le département pour chaque donnée + cas_dep = aliased(CorAreaSynthese) + lareas_dep = aliased(LAreas) + bib_area_dep = aliased(BibAreasTypes) + self.add_join(cas_dep, cas_dep.id_synthese, self.model.id_synthese) + self.add_join(lareas_dep, lareas_dep.id_area, cas_dep.id_area) + self.add_join_multiple_cond( + bib_area_dep, + [bib_area_dep.id_type == lareas_dep.id_type, bib_area_dep.type_code == "DEP"], + ) + + # Creation requête CTE : taxon, zone d'application départementale des textes + # pour les taxons répondant aux critères de selection + bdc_status_cte = ( + select( + [ + TaxrefBdcStatutTaxon.cd_ref, + func.array_agg(bdc_statut_cor_text_area.c.id_area).label("ids_area"), + ] + ) + .select_from( + TaxrefBdcStatutTaxon.__table__.join( + TaxrefBdcStatutCorTextValues, + TaxrefBdcStatutCorTextValues.id_value_text + == TaxrefBdcStatutTaxon.id_value_text, + ) + .join( + TaxrefBdcStatutText, + TaxrefBdcStatutText.id_text == TaxrefBdcStatutCorTextValues.id_text, + ) + .join( + TaxrefBdcStatutValues, + TaxrefBdcStatutValues.id_value == TaxrefBdcStatutCorTextValues.id_value, + ) + .join( + bdc_statut_cor_text_area, + bdc_statut_cor_text_area.c.id_text == TaxrefBdcStatutText.id_text, + ) + ) + .where(TaxrefBdcStatutText.enable == True) + ) + + # ajout des filtres de selection des textes + bdc_status_filters = [] + if red_list_filters: + bdc_status_filters = [ + and_( + TaxrefBdcStatutValues.code_statut.in_(v), + TaxrefBdcStatutText.cd_type_statut == k, + ) + for k, v in red_list_filters.items() + ] + if protection_status_value: + bdc_status_filters.append( + TaxrefBdcStatutText.cd_type_statut.in_(protection_status_value) + ) + + bdc_status_cte = bdc_status_cte.where(or_(*bdc_status_filters)) + + # group by de façon à ne selectionner que les taxons + # qui ont les textes selectionnés par l'utilisateurs + bdc_status_cte = bdc_status_cte.group_by(TaxrefBdcStatutTaxon.cd_ref).having( + func.count(distinct(TaxrefBdcStatutText.cd_type_statut)) + == (len(protection_status_value) + len(red_list_filters)) + ) + + bdc_status_cte = bdc_status_cte.cte(name="status") + + # Jointure sur le taxon + # et vérification que l'ensemble des textes + # soit sur bien sur le département de l'observation + self.add_join_multiple_cond( + bdc_status_cte, + [ + bdc_status_cte.c.cd_ref == Taxref.cd_ref, + func.array_length( + func.array_positions(bdc_status_cte.c.ids_area, cas_dep.id_area), 1 + ) + == (len(protection_status_value) + len(red_list_filters)), + ], + ) + + +def remove_accents(input_str): + nfkd_form = unicodedata.normalize("NFKD", input_str) + return "".join([c for c in nfkd_form if not unicodedata.combining(c)]) diff --git a/backend/geonature/core/notifications/admin.py b/backend/geonature/core/notifications/admin.py index 
index 6e322e92ee..c2a4c9101e 100644
--- a/backend/geonature/core/notifications/admin.py
+++ b/backend/geonature/core/notifications/admin.py
@@ -1,10 +1,14 @@
 from flask_admin.contrib.sqla import ModelView
 from geonature.utils.env import DB
+from geonature.core.admin.utils import CruvedProtectedMixin
 from geonature.core.notifications.models import NotificationCategory


-class NotificationTemplateAdmin(ModelView):
+class NotificationTemplateAdmin(CruvedProtectedMixin, ModelView):
+    module_code = "ADMIN"
+    object_code = "NOTIFICATIONS"
+
     column_list = ("code_category", "code_method", "content")
     column_labels = {
         "code_category": "Catégorie",
@@ -19,7 +23,10 @@ class NotificationTemplateAdmin(ModelView):
     }


-class NotificationCategoryAdmin(ModelView):
+class NotificationCategoryAdmin(CruvedProtectedMixin, ModelView):
+    module_code = "ADMIN"
+    object_code = "NOTIFICATIONS"
+
     column_list = ("code", "label", "description")
     form_columns = ("code", "label", "description")
     form_args = {
@@ -28,7 +35,10 @@ class NotificationCategoryAdmin(ModelView):
     }


-class NotificationMethodAdmin(ModelView):
+class NotificationMethodAdmin(CruvedProtectedMixin, ModelView):
+    module_code = "ADMIN"
+    object_code = "NOTIFICATIONS"
+
     column_list = ("code", "label", "description")
     form_columns = ("code", "label", "description")
     form_args = {
diff --git a/backend/geonature/core/notifications/models.py b/backend/geonature/core/notifications/models.py
index 5b605d4ede..498c2fde02 100644
--- a/backend/geonature/core/notifications/models.py
+++ b/backend/geonature/core/notifications/models.py
@@ -3,9 +3,11 @@
 """
 import datetime

+import sqlalchemy as sa
 from sqlalchemy import ForeignKey
 from sqlalchemy.sql import select
 from sqlalchemy.orm import relationship
+from flask import g

 from utils_flask_sqla.serializers import serializable
 from pypnusershub.db.models import User
@@ -85,6 +87,28 @@ class Notification(db.Model):
     user = db.relationship(User)


+class NotificationRuleQuery(db.Query):
+    def filter_by_role_with_defaults(self, id_role=None):
+        if id_role is None:
+            id_role = g.current_user.id_role
+        cte = (
+            NotificationRule.query.filter(
+                sa.or_(
+                    NotificationRule.id_role.is_(None),
+                    NotificationRule.id_role == id_role,
+                )
+            )
+            .distinct(NotificationRule.code_category, NotificationRule.code_method)
+            .order_by(
+                NotificationRule.code_category.desc(),
+                NotificationRule.code_method.desc(),
+                NotificationRule.id_role.asc(),
+            )
+            .cte("cte")
+        )
+        return self.filter(NotificationRule.id == cte.c.id)
+
+
 @serializable
 class NotificationRule(db.Model):
     __tablename__ = "t_notifications_rules"
@@ -92,16 +116,28 @@ class NotificationRule(db.Model):
         db.UniqueConstraint(
             "id_role", "code_method", "code_category", name="un_role_method_category"
         ),
+        db.Index(
+            "un_method_category",
+            "code_method",
+            "code_category",
+            unique=True,
+            postgresql_where=sa.text("id_role IS NULL"),
+        ),
         {"schema": "gn_notifications"},
     )

+    query_class = NotificationRuleQuery
+
     id = db.Column(db.Integer, primary_key=True)
-    id_role = db.Column(db.Integer, ForeignKey(User.id_role), nullable=False)
+    id_role = db.Column(db.Integer, ForeignKey(User.id_role), nullable=True)
     code_method = db.Column(db.Unicode, ForeignKey(NotificationMethod.code), nullable=False)
     code_category = db.Column(
         db.Unicode,
         ForeignKey(NotificationCategory.code),
         nullable=False,
     )
+    subscribed = db.Column(db.Boolean, nullable=False)

     method = relationship(NotificationMethod)
     category = relationship(NotificationCategory)
diff --git a/backend/geonature/core/notifications/routes.py b/backend/geonature/core/notifications/routes.py
index 407bc10221..093a5ba3c1 100644
--- a/backend/geonature/core/notifications/routes.py
+++ b/backend/geonature/core/notifications/routes.py
@@ -26,11 +26,11 @@
 routes = Blueprint("notifications", __name__)
 log = logging.getLogger()
+

 # Get all database notification for current user
 @routes.route("/notifications", methods=["GET"])
 @permissions.login_required
 def list_database_notification():
-
     notifications = Notification.query.filter(Notification.id_role == g.current_user.id_role)
     notifications = notifications.order_by(
         Notification.code_status.desc(), Notification.creation_date.desc()
@@ -56,7 +55,6 @@
 @routes.route("/count", methods=["GET"])
 @permissions.login_required
 def count_notification():
-
     notificationNumber = Notification.query.filter(
         Notification.id_role == g.current_user.id_role, Notification.code_status == "UNREAD"
     ).count()
@@ -67,7 +65,6 @@
 @routes.route("/notifications/<int:id_notification>", methods=["POST"])
 @permissions.login_required
 def update_notification(id_notification):
-
     notification = Notification.query.get_or_404(id_notification)
     if notification.id_role != g.current_user.id_role:
         raise Forbidden
@@ -80,28 +77,20 @@
 @routes.route("/rules", methods=["GET"])
 @permissions.login_required
 def list_notification_rules():
-    rules = (
-        NotificationRule.query.filter(NotificationRule.id_role == g.current_user.id_role)
-        .order_by(
-            NotificationRule.code_category.desc(),
-            NotificationRule.code_method.desc(),
-        )
-        .options(
-            joinedload("method"),
-            joinedload("category"),
-        )
+    rules = NotificationRule.query.filter_by_role_with_defaults().options(
+        joinedload("method"),
+        joinedload("category"),
     )
     result = [
         rule.as_dict(
             fields=[
-                "id",
-                "id_role",
                 "code_method",
                 "code_category",
                 "method.label",
                 "method.description",
                 "category.label",
                 "category.description",
+                "subscribed",
             ]
         )
         for rule in rules.all()
@@ -121,39 +111,45 @@ def delete_all_notifications():


 # add rule for user
-@routes.route("/rules", methods=["PUT"])
+@routes.route(
+    "/rules/category/<code_category>/method/<code_method>/subscribe",
+    methods=["POST"],
+    defaults={"subscribe": True},
+)
+@routes.route(
+    "/rules/category/<code_category>/method/<code_method>/unsubscribe",
+    methods=["POST"],
+    defaults={"subscribe": False},
+)
 @permissions.login_required
-def create_rule():
-
-    requestData = request.get_json()
-    if requestData is None:
-        raise BadRequest("Empty request data")
-
-    code_method = requestData.get("code_method")
-    if not code_method:
-        raise BadRequest("Missing method")
-    if not db.session.query(
-        NotificationMethod.query.filter_by(code=str(code_method)).exists()
-    ).scalar():
-        raise BadRequest("Invalid method")
-
-    code_category = requestData.get("code_category")
-    if not code_category:
-        raise BadRequest("Missing category")
+def update_rule(code_category, code_method, subscribe):
     if not db.session.query(
         NotificationCategory.query.filter_by(code=str(code_category)).exists()
     ).scalar():
         raise BadRequest("Invalid category")
+    if not db.session.query(
+        NotificationMethod.query.filter_by(code=str(code_method)).exists()
+    ).scalar():
+        raise BadRequest("Invalid method")

     # Create new rule for current user
-    new_rule = NotificationRule(
+    rule = NotificationRule.query.filter_by(
         id_role=g.current_user.id_role,
         code_method=code_method,
         code_category=code_category,
-    )
-    db.session.add(new_rule)
+    ).one_or_none()
+    if rule:
+        rule.subscribed = subscribe
+    else:
+        rule =
NotificationRule( + id_role=g.current_user.id_role, + code_method=code_method, + code_category=code_category, + subscribed=subscribe, + ) + db.session.add(rule) db.session.commit() - return jsonify(new_rule.as_dict()) + return jsonify(rule.as_dict(fields=["code_method", "code_category", "subscribed"])) # Delete all rules for current user @@ -167,18 +163,6 @@ def delete_all_rules(): return jsonify(nbRulesDeleted) -# Delete a specific rule -@routes.route("/rules/", methods=["DELETE"]) -@permissions.login_required -def delete_rule(id): - rule = NotificationRule.query.get_or_404(id) - if rule.user != g.current_user: - raise Forbidden - db.session.delete(rule) - db.session.commit() - return "", 204 - - # Get all availabe method for notification @routes.route("/methods", methods=["GET"]) @permissions.login_required diff --git a/backend/geonature/core/notifications/tasks.py b/backend/geonature/core/notifications/tasks.py index 30d825e666..8b238178ff 100644 --- a/backend/geonature/core/notifications/tasks.py +++ b/backend/geonature/core/notifications/tasks.py @@ -8,6 +8,5 @@ @celery_app.task(bind=True) def send_notification_mail(self, subject, content, recipient): - logger.info(f"Launch mail.") mail.send_mail(recipient, subject, content) diff --git a/backend/geonature/core/notifications/utils.py b/backend/geonature/core/notifications/utils.py index 90df7fbc16..f8d8d61ea6 100644 --- a/backend/geonature/core/notifications/utils.py +++ b/backend/geonature/core/notifications/utils.py @@ -2,6 +2,7 @@ from jinja2 import Template from flask import current_app +import sqlalchemy as sa from pypnusershub.db.models import User @@ -40,9 +41,9 @@ def dispatch_notification(category, role, title=None, url=None, *, content=None, # add role, title and url to rendering context context = {"role": role, "title": title, "url": url, **context} - rules = NotificationRule.query.filter( - NotificationRule.id_role == role.id_role, + rules = NotificationRule.query.filter_by_role_with_defaults(role.id_role).filter( NotificationRule.code_category == category.code, + NotificationRule.subscribed.is_(sa.true()), ) for rule in rules.all(): if content: diff --git a/backend/geonature/core/ref_geo/routes.py b/backend/geonature/core/ref_geo/routes.py deleted file mode 100644 index 5990a41451..0000000000 --- a/backend/geonature/core/ref_geo/routes.py +++ /dev/null @@ -1,301 +0,0 @@ -from itertools import groupby -import json - -from flask import Blueprint, request, current_app -from flask.json import jsonify -import sqlalchemy as sa -from sqlalchemy import func, distinct, asc, desc -from sqlalchemy.sql import text -from sqlalchemy.orm import joinedload, undefer -from werkzeug.exceptions import BadRequest - -from geonature.utils.env import db - -from ref_geo.models import BibAreasTypes, LiMunicipalities, LAreas -from utils_flask_sqla.response import json_resp - - -routes = Blueprint("ref_geo", __name__) - -altitude_stmt = sa.select([sa.column("altitude_min"), sa.column("altitude_max"),]).select_from( - func.ref_geo.fct_get_altitude_intersection( - func.ST_SetSRID( - func.ST_GeomFromGeoJSON(sa.bindparam("geojson")), - 4326, - ), - ) -) - -geojson_intersect_filter = func.ST_Intersects( - LAreas.geom, - func.ST_Transform( - func.ST_SetSRID(func.ST_GeomFromGeoJSON(sa.bindparam("geojson")), 4326), - func.Find_SRID("ref_geo", "l_areas", "geom"), - ), -) - -area_size_func = func.ST_Area( - func.ST_Transform( - func.ST_SetSrid( - func.ST_GeomFromGeoJSON(sa.bindparam("geojson")), - 4326, - ), - func.Find_SRID("ref_geo", "l_areas", "geom"), - ) 
-) - - -@routes.route("/info", methods=["POST"]) -def getGeoInfo(): - """ - From a posted geojson, the route return the municipalities intersected - and the altitude min/max - - .. :quickref: Ref Geo; - """ - if not request.is_json or request.json is None: - raise BadRequest("Missing request payload") - try: - geojson = request.json["geometry"] - except KeyError: - raise BadRequest("Missing 'geometry' in request payload") - geojson = json.dumps(geojson) - - areas = LAreas.query.filter_by(enable=True).filter( - geojson_intersect_filter.params(geojson=geojson) - ) - if "area_type" in request.json: - areas = areas.join(BibAreasTypes).filter_by(type_code=request.json["area_type"]) - elif "id_type" in request.json: - try: - id_type = int(request.json["id_type"]) - except ValueError: - raise BadRequest("Parameter 'id_type' must be an integer") - areas = areas.filter_by(id_type=id_type) - - altitude = db.session.execute(altitude_stmt, params={"geojson": geojson}).fetchone() - - return jsonify( - { - "areas": [ - area.as_dict(fields=["id_area", "id_type", "area_code", "area_name"]) - for area in areas.all() - ], - "altitude": altitude, - } - ) - - -@routes.route("/altitude", methods=["POST"]) -def getAltitude(): - """ - From a posted geojson get the altitude min/max - - .. :quickref: Ref Geo; - """ - if not request.is_json: - raise BadRequest("Missing request payload") - try: - geojson = request.json["geometry"] - except KeyError: - raise BadRequest("Missing 'geometry' in request payload") - geojson = json.dumps(geojson) - - altitude = db.session.execute(altitude_stmt, params={"geojson": geojson}).fetchone() - return jsonify(altitude) - - -@routes.route("/areas", methods=["POST"]) -def getAreasIntersection(): - """ - From a posted geojson, the route return all the area intersected - from l_areas - .. :quickref: Ref Geo; - """ - if not request.is_json or request.json is None: - raise BadRequest("Missing request payload") - try: - geojson = request.json["geometry"] - except KeyError: - raise BadRequest("Missing 'geometry' in request payload") - geojson = json.dumps(geojson) - - areas = LAreas.query.filter_by(enable=True).filter( - geojson_intersect_filter.params(geojson=geojson) - ) - if "area_type" in request.json: - areas = areas.join(BibAreasTypes).filter_by(type_code=request.json["area_type"]) - elif "id_type" in request.json: - try: - id_type = int(request.json["id_type"]) - except ValueError: - raise BadRequest("Parameter 'id_type' must be an integer") - areas = areas.filter_by(id_type=id_type) - areas = areas.order_by(LAreas.id_type) - - response = {} - for id_type, _areas in groupby(areas.all(), key=lambda area: area.id_type): - _areas = list(_areas) - response[id_type] = _areas[0].area_type.as_dict(fields=["type_code", "type_name"]) - response[id_type].update( - { - "areas": [ - area.as_dict( - fields=[ - "area_code", - "area_name", - "id_area", - "id_type", - ] - ) - for area in _areas - ], - } - ) - - return jsonify(response) - - -@routes.route("/municipalities", methods=["GET"]) -@json_resp -def get_municipalities(): - """ - Return the municipalities - .. 
:quickref: Ref Geo; - """ - parameters = request.args - - q = db.session.query(LiMunicipalities).order_by(LiMunicipalities.nom_com.asc()) - - if "nom_com" in parameters: - q = q.filter(LiMunicipalities.nom_com.ilike("{}%".format(parameters.get("nom_com")))) - limit = int(parameters.get("limit")) if parameters.get("limit") else 100 - - data = q.limit(limit) - return [d.as_dict() for d in data] - - -def to_geojson(data): - features = [] - for feature in data: - geometry = feature.pop("geojson_4326", None) - features.append({"type": "Feature", "properties": feature, "geometry": geometry}) - return features - - -@routes.route("/areas", methods=["GET"]) -def get_areas(): - """ - Return the areas of ref_geo.l_areas - .. :quickref: Ref Geo; - """ - # change all args in a list of value - params = {key: request.args.getlist(key) for key, value in request.args.items()} - - q = ( - db.session.query(LAreas) - .options(joinedload("area_type").load_only("type_code")) - .order_by(LAreas.area_name.asc()) - ) - - if "enable" in params: - enable_param = params["enable"][0].lower() - accepted_enable_values = ["true", "false", "all"] - if enable_param not in accepted_enable_values: - response = { - "message": f"Le paramètre 'enable' accepte seulement les valeurs: {', '.join(accepted_enable_values)}.", - "status": "warning", - } - return response, 400 - if enable_param == "true": - q = q.filter(LAreas.enable == True) - elif enable_param == "false": - q = q.filter(LAreas.enable == False) - else: - q = q.filter(LAreas.enable == True) - - if "id_type" in params: - q = q.filter(LAreas.id_type.in_(params["id_type"])) - - if "type_code" in params: - q = q.filter(LAreas.area_type.has(BibAreasTypes.type_code.in_(params["type_code"]))) - - if "area_name" in params: - q = q.filter(LAreas.area_name.ilike("%{}%".format(params.get("area_name")[0]))) - - limit = int(params.get("limit")[0]) if params.get("limit") else 100 - - data = q.limit(limit) - - # allow to format response - format = request.args.get("format", default="", type=str) - - fields = {"area_type.type_code"} - if format == "geojson": - fields |= {"+geojson_4326"} - data = data.options(undefer("geojson_4326")) - response = [d.as_dict(fields=fields) for d in data] - if format == "geojson": - # format features as geojson according to standard - response = to_geojson(response) - return jsonify(response) - - -@routes.route("/area_size", methods=["Post"]) -def get_area_size(): - """ - Return the area size from a given geojson - - .. :quickref: Ref Geo; - - :returns: An area size (int) - """ - if not request.is_json or request.json is None: - raise BadRequest("Missing request payload") - try: - geojson = request.json["geometry"] - except KeyError: - raise BadRequest("Missing 'geometry' in request payload") - geojson = json.dumps(geojson) - - query = db.session.query(area_size_func.params(geojson=geojson)) - - return jsonify(db.session.execute(query).scalar()) - - -@routes.route("/types", methods=["Get"]) -def get_area_types(): - """ - Get areas types list - - .. 
:quickref: Areas; - - :query str code: Type area code (ref_geo.bib_areas_types.type_code) - :query str name: Type area name (ref_geo.bib_areas_types.type_name) - :query str sort: sort value as ASC - DESC - """ - type_code = request.args.get("code") - type_name = request.args.get("name") - sort = request.args.get("sort") - query = db.session.query(BibAreasTypes) - # GET ONLY INFO FOR A SPECIFIC CODE - if type_code: - code_exists = ( - db.session.query(BibAreasTypes) - .filter(BibAreasTypes.type_code == type_code) - .one_or_none() - ) - if not code_exists: - raise BadRequest("This area type code does not exist") - query = query.filter(BibAreasTypes.type_code == type_code) - # FILTER BY NAME - if type_name: - query = query.filter(BibAreasTypes.type_name.ilike("%{}%".format(type_name))) - # SORT - if sort == "asc": - query = query.order_by(asc("type_name")) - if sort == "desc": - query = query.order_by(desc("type_name")) - # FIELDS - fields = ["type_name", "type_code", "id_type"] - return jsonify([d.as_dict(fields=fields) for d in query.all()]) diff --git a/backend/geonature/core/routes.py b/backend/geonature/core/routes.py deleted file mode 100644 index 57cb0d73ab..0000000000 --- a/backend/geonature/core/routes.py +++ /dev/null @@ -1,42 +0,0 @@ -""" - Définition de routes "génériques" - c-a-d pouvant servir à tous module -""" - -import os -import logging - -from flask import Blueprint, request, current_app, jsonify - -from geonature.utils.env import DB -from geonature.core.gn_monitoring.config_manager import generate_config - -from geonature.core.gn_permissions import decorators as permissions - -routes = Blueprint("core", __name__) - -# get the root logger -log = logging.getLogger() - - -@routes.route("/config", methods=["GET"]) -@permissions.check_cruved_scope("R", module_code="SUIVIS") -def get_config(): - """ - Parse and return configuration files as toml - .. 
:quickref: Generic; - """ - app_name = request.args.get("app", "base_app") - vue_name = request.args.getlist("vue") - - base_path = os.path.abspath(os.path.join(current_app.static_folder, "configs")) - conf_path = os.path.abspath(os.path.join(base_path, app_name, *vue_name)) - # test : file inside config folder - if not conf_path.startswith(base_path): - return "Not a valid config path", 404 - - if not vue_name: - vue_name = ["default"] - filename = "{}.toml".format(conf_path) - config_file = generate_config(filename) - return jsonify(config_file) diff --git a/backend/geonature/core/sensitivity/utils.py b/backend/geonature/core/sensitivity/utils.py index 187bc4dec0..3dc7848c4b 100644 --- a/backend/geonature/core/sensitivity/utils.py +++ b/backend/geonature/core/sensitivity/utils.py @@ -50,14 +50,17 @@ def insert_sensitivity_referential(source, csvfile): rules = [] criterias = set() reader = csv.DictReader(csvfile, delimiter=";") + dep_col = next( + fieldname for fieldname in reader.fieldnames if fieldname in ["CD_DEP", "CD_DEPT"] + ) for row in reader: sensi_nomenclature = get_nomenclature("SENSIBILITE", code=row["CD_SENSIBILITE"]) - if row["CD_DEP"] == "D3": + if row[dep_col] == "D3": cd_dep = "973" - elif row["CD_DEP"] == "D4": + elif row[dep_col] == "D4": cd_dep = "974" else: - cd_dep = row["CD_DEP"] + cd_dep = row[dep_col] if row["DUREE"]: duration = int(row["DUREE"]) else: @@ -78,7 +81,7 @@ def insert_sensitivity_referential(source, csvfile): criteria = get_nomenclature("STATUT_BIO", code=row["STATUT_BIOLOGIQUE"]) _criterias |= {criteria} | defaults_nomenclatures[criteria.nomenclature_type] if row["COMPORTEMENT"]: - criteria = get_nomenclature("OCC_COMPORTEMENT", code=row["STATUT_BIOLOGIQUE"]) + criteria = get_nomenclature("OCC_COMPORTEMENT", code=row["COMPORTEMENT"]) _criterias |= {criteria} | defaults_nomenclatures[criteria.nomenclature_type] for criteria in _criterias: criterias.add((len(rules), criteria)) diff --git a/backend/geonature/core/users/register_post_actions.py b/backend/geonature/core/users/register_post_actions.py index 6095596ae2..caa223cd6c 100644 --- a/backend/geonature/core/users/register_post_actions.py +++ b/backend/geonature/core/users/register_post_actions.py @@ -20,6 +20,14 @@ from geonature.utils.env import db, DB +def validators_emails(): + """ + On souhaite récupérer une liste de mails + """ + emails = current_app.config["ACCOUNT_MANAGEMENT"]["VALIDATOR_EMAIL"] + return emails if isinstance(emails, list) else [emails] + + def validate_temp_user(data): """ Send an email after the action of account creation. 
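Since ACCOUNT_MANAGEMENT.VALIDATOR_EMAIL may now hold either a single address or a list, the new validators_emails() helper normalizes both shapes before mailing. A minimal sketch of that behaviour, standalone and without the Flask app context it normally reads from:

    # Illustrative sketch of the normalization done by validators_emails():
    # the config value may be a single string or a list of strings.
    def normalize_emails(emails):
        return emails if isinstance(emails, list) else [emails]

    assert normalize_emails("admin@example.org") == ["admin@example.org"]
    assert normalize_emails(["a@example.org", "b@example.org"]) == [
        "a@example.org",
        "b@example.org",
    ]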
@@ -44,17 +52,18 @@ def validate_temp_user(data): recipients = [user.email] else: template = "email_admin_validate_account.html" - recipients = [current_app.config["ACCOUNT_MANAGEMENT"]["VALIDATOR_EMAIL"]] + recipients = current_app.config["ACCOUNT_MANAGEMENT"]["VALIDATOR_EMAIL"] url_validation = url_for("users.confirmation", token=user.token_role, _external=True) + additional_fields = [ + {"key": key, "value": value} for key, value in (user_dict.get("champs_addi") or {}).items() + ] + msg_html = render_template( template, url_validation=url_validation, user=user_dict, - additional_fields=[ - {"key": key, "value": value} - for key, value in (user_dict.get("champs_addi") or {}).items() - ], + additional_fields=additional_fields, ) send_mail(recipients, subject, msg_html) @@ -151,7 +160,8 @@ def inform_user(user): text_addon=html_text_addon, ) subject = f"Confirmation inscription {app_name}" - send_mail([user["email"]], subject, msg_html) + recipients = [user["email"]] + send_mail(recipients, subject, msg_html) def send_email_for_recovery(data): @@ -160,7 +170,6 @@ def send_email_for_recovery(data): its password """ user = data["role"] - recipients = current_app.config["MAIL_CONFIG"]["MAIL_USERNAME"] url_password = ( current_app.config["URL_APPLICATION"] + "/#/login/new-password?token=" + data["token"] ) diff --git a/backend/geonature/core/users/routes.py b/backend/geonature/core/users/routes.py index 7029270b10..3d564c9744 100644 --- a/backend/geonature/core/users/routes.py +++ b/backend/geonature/core/users/routes.py @@ -3,7 +3,7 @@ import json -from flask import Blueprint, request, current_app, Response, redirect, g +from flask import Blueprint, request, current_app, Response, redirect, g, render_template from sqlalchemy.sql import distinct, and_ from werkzeug.exceptions import NotFound, BadRequest, Forbidden @@ -117,7 +117,6 @@ def get_roles_by_menu_code(code_liste): @routes.route("/listes", methods=["GET"]) @json_resp def get_listes(): - q = DB.session.query(UserList) lists = q.all() return [l.as_dict() for l in lists] @@ -136,9 +135,11 @@ def get_role(id_role): :type id_role: int """ user = User.query.get_or_404(id_role) - user_fields.add('email') - user_fields.add('champs_addi') - return user.as_dict(fields=user_fields) + fields = user_fields.copy() + if g.current_user == user: + fields.add("email") + fields.add('champs_addi') + return user.as_dict(fields=fields) @routes.route("/roles", methods=["GET"]) @@ -215,6 +216,7 @@ def get_organismes_jdd(): ### ACCOUNT_MANAGEMENT ROUTES ##### ######################### + # TODO: let frontend call UsersHub directly? 
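The get_role() change above stops exposing email and champs_addi to other users: those fields are only serialized when the requested profile belongs to the requester. A simplified sketch of that rule (names are illustrative, not the real module symbols):

    # Sensitive fields are serialized only for the requester's own profile,
    # mirroring the g.current_user == user check in get_role().
    def visible_fields(requested_user, current_user, base=("id_role", "nom_role")):
        fields = set(base)
        if requested_user == current_user:
            fields |= {"email", "champs_addi"}
        return fields

    assert "email" in visible_fields("alice", "alice")
    assert "email" not in visible_fields("alice", "bob")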
@routes.route("/inscription", methods=["POST"]) def inscription(): @@ -297,9 +299,14 @@ def confirmation(): ) if r.status_code != 200: + if r.json() and r.json().get("msg"): + return r.json().get("msg"), r.status_code return Response(r), r.status_code - return redirect(config["URL_APPLICATION"], code=302) + new_user = r.json() + return render_template( + "account_created.html", user=new_user, redirect_url=config["URL_APPLICATION"] + ) @routes.route("/after_confirmation", methods=["POST"]) @@ -316,7 +323,7 @@ def after_confirmation(): @routes.route("/role", methods=["PUT"]) -@permissions.check_cruved_scope("R") +@permissions.login_required @json_resp def update_role(): """ @@ -361,9 +368,9 @@ def update_role(): @routes.route("/password/change", methods=["PUT"]) -@check_auth(1, True) +@permissions.login_required @json_resp -def change_password(id_role): +def change_password(): """ Modifie le mot de passe de l'utilisateur connecté et de son ancien mdp Fait appel à l'API UsersHub @@ -371,9 +378,7 @@ def change_password(id_role): if not current_app.config["ACCOUNT_MANAGEMENT"].get("ENABLE_USER_MANAGEMENT", False): return {"message": "Page introuvable"}, 404 - user = DB.session.query(User).get(id_role) - if not user: - return {"msg": "Droit insuffisant"}, 403 + user = g.current_user data = request.get_json() init_password = data.get("init_password", None) diff --git a/backend/geonature/core/users/templates/account_created.html b/backend/geonature/core/users/templates/account_created.html new file mode 100644 index 0000000000..b9f6d96b7c --- /dev/null +++ b/backend/geonature/core/users/templates/account_created.html @@ -0,0 +1,21 @@ + + + +

Création de compte validée

+

Le compte suivant a bien été validé:

+
    +
  • Nom : {{user.nom_role}}
  • +
  • Prénom : {{user.prenom_role}}
  • +
  • Identifiant : {{user.identifiant}}
  • +
  • Email : {{user.email}}
  • + {% for key, value in user.champs_addi.items() %} +
  • + {{key}} : {{value}} +
  • + {% endfor %} +
+ + Retour à l'application GeoNature + + + \ No newline at end of file diff --git a/backend/geonature/core/users/templates/email_admin_validate_account.html b/backend/geonature/core/users/templates/email_admin_validate_account.html index a92c44d413..7379afc5b2 100644 --- a/backend/geonature/core/users/templates/email_admin_validate_account.html +++ b/backend/geonature/core/users/templates/email_admin_validate_account.html @@ -10,6 +10,10 @@
  • Prénom : {{user.prenom_role}}
  • Identifiant : {{user.identifiant}}
  • Email : {{user.email}}
  • +{% if user.remarques %} +
  • Remarques : {{user.remarques}}
  • +{% endif%} + {% if additional_fields | length > 0 %} diff --git a/backend/geonature/core/utils.py b/backend/geonature/core/utils.py deleted file mode 100644 index a3cc0834cc..0000000000 --- a/backend/geonature/core/utils.py +++ /dev/null @@ -1,89 +0,0 @@ -""" -GeoNature core utils -""" - -from geonature.core.gn_meta.models import TDatasets -from geonature.core.users.models import UserRigth -from geonature.utils.env import DB -from werkzeug.exceptions import Forbidden - - -class ModelCruvedAutorization(DB.Model): - """ - Classe abstraite permettant d'ajout des méthodes - de controle d'accès à la donnée en fonction - des droits associés à un utilisateur - La classe enfant doit avoir les attributs suivant dans son constructeur: - - observers - - dataset - - id_digitiser - - id_dataset - A définir en tant que "synonymes" si les attributs sont différents - ou surcharger les méthodes - """ - - __abstract__ = True - - def user_is_observer_or_digitiser(self, user): - observers = [d.id_role for d in self.observers] - return user.id_role == self.id_digitiser or user.id_role in observers - - def user_is_in_dataset_actor(self, user): - return self.id_dataset in ( - d.id_dataset for d in TDatasets.query.filter_by_scope(int(user.value_filter)).all() - ) - - def user_is_allowed_to(self, user, level): - """ - Fonction permettant de dire si un utilisateur - peu ou non agir sur une donnée - """ - # Si l'utilisateur n'a pas de droit d'accès aux données - if level == "0" or level not in ("1", "2", "3"): - return False - - # Si l'utilisateur à le droit d'accéder à toutes les données - if level == "3": - return True - - # Si l'utilisateur est propriétaire de la données - if self.user_is_observer_or_digitiser(user): - return True - - # Si l'utilisateur appartient à un organisme - # qui a un droit sur la données et - # que son niveau d'accès est 2 ou 3 - if self.user_is_in_dataset_actor(user) and level in ("2", "3"): - return True - return False - - def check_if_allowed(self, role, action, level_scope): - """ - Return the releve if the user is allowed - params: - role - """ - user = UserRigth( - id_role=role.id_role, - value_filter=level_scope, - code_action=action, - id_organisme=role.id_organisme, - ) - if self.user_is_allowed_to(user, user.value_filter): - return self - - raise Forbidden( - ('User "{}" cannot "{}" this current releve').format(user.id_role, user.code_action), - ) - - def get_model_cruved(self, user, user_cruved): - """ - Return the user's cruved for a Releve instance. 
-        Use in the map-list interface to allow or not an action
-        params:
-            - user : a TRole object
-            - user_cruved: object return by cruved_for_user_in_app(user)
-        """
-        return {
-            action: self.user_is_allowed_to(user, level) for action, level in user_cruved.items()
-        }
diff --git a/backend/geonature/migrations/data/core/commons.sql b/backend/geonature/migrations/data/core/commons.sql
index 88b8752372..fcd84c0d4c 100644
--- a/backend/geonature/migrations/data/core/commons.sql
+++ b/backend/geonature/migrations/data/core/commons.sql
@@ -711,7 +711,7 @@ INSERT INTO bib_tables_location (table_desc, schema_name, table_name, pk_field,
 ;
 
 INSERT INTO t_parameters (id_organism, parameter_name, parameter_desc, parameter_value, parameter_extra_value) VALUES
-((SELECT id_organisme FROM utilisateurs.bib_organismes WHERE nom_organisme = 'ALL'),'taxref_version','Version du référentiel taxonomique','Taxref V14.0',NULL)
+((SELECT id_organisme FROM utilisateurs.bib_organismes WHERE nom_organisme = 'ALL'),'taxref_version','Version du référentiel taxonomique','Taxref V16.0',NULL)
 ,((SELECT id_organisme FROM utilisateurs.bib_organismes WHERE nom_organisme = 'ALL'),'local_srid','Valeur du SRID local', :local_srid, NULL)
 ,((SELECT id_organisme FROM utilisateurs.bib_organismes WHERE nom_organisme = 'ALL'),'annee_ref_commune', 'Année du référentiel géographique des communes utilisé', '2017', NULL)
 ,((SELECT id_organisme FROM utilisateurs.bib_organismes WHERE nom_organisme = 'ALL'),'occtaxmobile_area_type', 'Type de zonage pour lequel la couleur des taxons est calculée pour Occtax-mobile', 'M5', NULL)
diff --git a/backend/geonature/migrations/versions/05a91edb6796_improve_v_synthese_for_export.py b/backend/geonature/migrations/versions/05a91edb6796_improve_v_synthese_for_export.py
new file mode 100644
index 0000000000..a663096827
--- /dev/null
+++ b/backend/geonature/migrations/versions/05a91edb6796_improve_v_synthese_for_export.py
@@ -0,0 +1,363 @@
+"""Improve the view gn_synthese.v_synthese_for_export
+
+Revision ID: 05a91edb6796
+Revises: 5d65f9c93a32
+Create Date: 2023-02-21 17:17:03.016130
+
+"""
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = "05a91edb6796"
+down_revision = "8279ce74006b"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    """Improve the view gn_synthese.v_synthese_for_export :
+    - Removing "WITHOUT time ZONE" occurrences, so as to keep this useful information (time zone)
+    - (Optimization) Use of "LEFT JOIN LATERAL ..." instead of "JOIN ...", so as to improve the execution time
+    - Addition of the "area_code" to the "area_name" in the output field "communes", because some municipalities may have the same name but differ by their code
+
+    N.B. : Use of "DROP VIEW ..." and "CREATE VIEW ..."
rather than "CREATE OR REPLACE VIEW ...", so as to handle swap of output fields when changing the view + """ + + # First deleting the existing view + op.execute( + """ + DROP VIEW gn_synthese.v_synthese_for_export; + """ + ) + + # Creating the view in its new form + op.execute( + """ + CREATE VIEW gn_synthese.v_synthese_for_export + AS + SELECT + s.id_synthese AS id_synthese, + s.date_min::date AS date_debut, + s.date_max::date AS date_fin, + s.date_min::time AS heure_debut, + s.date_max::time AS heure_fin, + t.cd_nom AS cd_nom, + t.cd_ref AS cd_ref, + t.nom_valide AS nom_valide, + t.nom_vern AS nom_vernaculaire, + s.nom_cite AS nom_cite, + t.regne AS regne, + t.group1_inpn AS group1_inpn, + t.group2_inpn AS group2_inpn, + t.classe AS classe, + t.ordre AS ordre, + t.famille AS famille, + t.id_rang AS rang_taxo, + s.count_min AS nombre_min, + s.count_max AS nombre_max, + s.altitude_min AS alti_min, + s.altitude_max AS alti_max, + s.depth_min AS prof_min, + s.depth_max AS prof_max, + s.observers AS observateurs, + s.id_digitiser AS id_digitiser, + -- Utile pour le CRUVED + s.determiner AS determinateur, + sa.communes AS communes, + st_astext(s.the_geom_4326) AS geometrie_wkt_4326, + st_x(s.the_geom_point) AS x_centroid_4326, + st_y(s.the_geom_point) AS y_centroid_4326, + st_asgeojson(s.the_geom_4326) AS geojson_4326, + -- Utile pour la génération de l'export en SHP + st_asgeojson(s.the_geom_local) AS geojson_local, + -- Utile pour la génération de l'export en SHP + s.place_name AS nom_lieu, + s.comment_context AS comment_releve, + s.comment_description AS comment_occurrence, + s.validator AS validateur, + n21.label_default AS niveau_validation, + s.meta_validation_date AS date_validation, + s.validation_comment AS comment_validation, + s.digital_proof AS preuve_numerique_url, + s.non_digital_proof AS preuve_non_numerique, + d.dataset_name AS jdd_nom, + d.unique_dataset_id AS jdd_uuid, + d.id_dataset AS jdd_id, + -- Utile pour le CRUVED + af.acquisition_framework_name AS ca_nom, + af.unique_acquisition_framework_id AS ca_uuid, + d.id_acquisition_framework AS ca_id, + s.cd_hab AS cd_habref, + hab.lb_code AS cd_habitat, + hab.lb_hab_fr AS nom_habitat, + s.precision AS precision_geographique, + n1.label_default AS nature_objet_geo, + n2.label_default AS type_regroupement, + s.grp_method AS methode_regroupement, + n3.label_default AS technique_observation, + n5.label_default AS biologique_statut, + n6.label_default AS etat_biologique, + n22.label_default AS biogeographique_statut, + n7.label_default AS naturalite, + n8.label_default AS preuve_existante, + n9.label_default AS niveau_precision_diffusion, + n10.label_default AS stade_vie, + n11.label_default AS sexe, + n12.label_default AS objet_denombrement, + n13.label_default AS type_denombrement, + n14.label_default AS niveau_sensibilite, + n15.label_default AS statut_observation, + n16.label_default AS floutage_dee, + n17.label_default AS statut_source, + n18.label_default AS type_info_geo, + n19.label_default AS methode_determination, + n20.label_default AS comportement, + s.reference_biblio AS reference_biblio, + s.entity_source_pk_value AS id_origine, + s.unique_id_sinp AS uuid_perm_sinp, + s.unique_id_sinp_grp AS uuid_perm_grp_sinp, + s.meta_create_date AS date_creation, + s.meta_update_date AS date_modification, + s.additional_data AS champs_additionnels, + COALESCE(s.meta_update_date, s.meta_create_date) AS derniere_action + FROM + gn_synthese.synthese s + JOIN taxonomie.taxref t ON + t.cd_nom = s.cd_nom + JOIN gn_meta.t_datasets d ON + 
d.id_dataset = s.id_dataset + JOIN gn_meta.t_acquisition_frameworks af ON + d.id_acquisition_framework = af.id_acquisition_framework + LEFT JOIN LATERAL ( + SELECT + cas.id_synthese, + string_agg(DISTINCT concat(a_1.area_name, ' (', a_1.area_code, ')'), ', '::TEXT) AS communes + FROM + gn_synthese.cor_area_synthese cas + JOIN ref_geo.l_areas a_1 ON + cas.id_area = a_1.id_area + JOIN ref_geo.bib_areas_types ta ON + ta.id_type = a_1.id_type + AND ta.type_code = 'COM' + WHERE + cas.id_synthese = s.id_synthese + GROUP BY + cas.id_synthese) sa ON + TRUE + LEFT JOIN ref_nomenclatures.t_nomenclatures n1 ON + s.id_nomenclature_geo_object_nature = n1.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n2 ON + s.id_nomenclature_grp_typ = n2.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n3 ON + s.id_nomenclature_obs_technique = n3.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n5 ON + s.id_nomenclature_bio_status = n5.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n6 ON + s.id_nomenclature_bio_condition = n6.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n7 ON + s.id_nomenclature_naturalness = n7.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n8 ON + s.id_nomenclature_exist_proof = n8.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n9 ON + s.id_nomenclature_diffusion_level = n9.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n10 ON + s.id_nomenclature_life_stage = n10.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n11 ON + s.id_nomenclature_sex = n11.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n12 ON + s.id_nomenclature_obj_count = n12.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n13 ON + s.id_nomenclature_type_count = n13.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n14 ON + s.id_nomenclature_sensitivity = n14.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n15 ON + s.id_nomenclature_observation_status = n15.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n16 ON + s.id_nomenclature_blurring = n16.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n17 ON + s.id_nomenclature_source_status = n17.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n18 ON + s.id_nomenclature_info_geo_type = n18.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n19 ON + s.id_nomenclature_determination_method = n19.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n20 ON + s.id_nomenclature_behaviour = n20.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n21 ON + s.id_nomenclature_valid_status = n21.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n22 ON + s.id_nomenclature_biogeo_status = n22.id_nomenclature + LEFT JOIN ref_habitats.habref hab ON + hab.cd_hab = s.cd_hab; + """ + ) + + +def downgrade(): + # First deleting the existing view + op.execute( + """ + DROP VIEW gn_synthese.v_synthese_for_export; + """ + ) + + # Creating the view back in its ancient form + op.execute( + """ + CREATE OR REPLACE VIEW gn_synthese.v_synthese_for_export AS + SELECT + s.id_synthese AS id_synthese, + s.date_min::date AS date_debut, + s.date_max::date AS date_fin, + s.date_min::time AS heure_debut, + s.date_max::time AS heure_fin, + t.cd_nom AS cd_nom, + t.cd_ref AS cd_ref, + t.nom_valide AS nom_valide, + t.nom_vern AS nom_vernaculaire, + s.nom_cite AS nom_cite, + t.regne AS regne, + t.group1_inpn AS group1_inpn, + t.group2_inpn AS group2_inpn, + t.classe AS 
classe, + t.ordre AS ordre, + t.famille AS famille, + t.id_rang AS rang_taxo, + s.count_min AS nombre_min, + s.count_max AS nombre_max, + s.altitude_min AS alti_min, + s.altitude_max AS alti_max, + s.depth_min AS prof_min, + s.depth_max AS prof_max, + s.observers AS observateurs, + s.id_digitiser AS id_digitiser, + -- Utile pour le CRUVED + s.determiner AS determinateur, + communes AS communes, + public.ST_astext(s.the_geom_4326) AS geometrie_wkt_4326, + public.ST_x(s.the_geom_point) AS x_centroid_4326, + public.ST_y(s.the_geom_point) AS y_centroid_4326, + public.ST_asgeojson(s.the_geom_4326) AS geojson_4326, + -- Utile pour la génération de l'export en SHP + public.ST_asgeojson(s.the_geom_local) AS geojson_local, + -- Utile pour la génération de l'export en SHP + s.place_name AS nom_lieu, + s.comment_context AS comment_releve, + s.comment_description AS comment_occurrence, + s.validator AS validateur, + n21.label_default AS niveau_validation, + s.meta_validation_date AS date_validation, + s.validation_comment AS comment_validation, + s.digital_proof AS preuve_numerique_url, + s.non_digital_proof AS preuve_non_numerique, + d.dataset_name AS jdd_nom, + d.unique_dataset_id AS jdd_uuid, + d.id_dataset AS jdd_id, + -- Utile pour le CRUVED + af.acquisition_framework_name AS ca_nom, + af.unique_acquisition_framework_id AS ca_uuid, + d.id_acquisition_framework AS ca_id, + s.cd_hab AS cd_habref, + hab.lb_code AS cd_habitat, + hab.lb_hab_fr AS nom_habitat, + s.precision AS precision_geographique, + n1.label_default AS nature_objet_geo, + n2.label_default AS type_regroupement, + s.grp_method AS methode_regroupement, + n3.label_default AS technique_observation, + n5.label_default AS biologique_statut, + n6.label_default AS etat_biologique, + n22.label_default AS biogeographique_statut, + n7.label_default AS naturalite, + n8.label_default AS preuve_existante, + n9.label_default AS niveau_precision_diffusion, + n10.label_default AS stade_vie, + n11.label_default AS sexe, + n12.label_default AS objet_denombrement, + n13.label_default AS type_denombrement, + n14.label_default AS niveau_sensibilite, + n15.label_default AS statut_observation, + n16.label_default AS floutage_dee, + n17.label_default AS statut_source, + n18.label_default AS type_info_geo, + n19.label_default AS methode_determination, + n20.label_default AS comportement, + s.reference_biblio AS reference_biblio, + s.entity_source_pk_value AS id_origine, + s.unique_id_sinp AS uuid_perm_sinp, + s.unique_id_sinp_grp AS uuid_perm_grp_sinp, + s.meta_create_date AS date_creation, + s.meta_update_date AS date_modification, + s.additional_data AS champs_additionnels, + COALESCE(s.meta_update_date, + s.meta_create_date) AS derniere_action + FROM + gn_synthese.synthese s + JOIN taxonomie.taxref t ON + t.cd_nom = s.cd_nom + JOIN gn_meta.t_datasets d ON + d.id_dataset = s.id_dataset + JOIN gn_meta.t_acquisition_frameworks af ON + d.id_acquisition_framework = af.id_acquisition_framework + LEFT OUTER JOIN ( + SELECT + id_synthese, + string_agg(DISTINCT area_name, + ', ') AS communes + FROM + gn_synthese.cor_area_synthese cas + LEFT OUTER JOIN ref_geo.l_areas a_1 ON + cas.id_area = a_1.id_area + JOIN ref_geo.bib_areas_types ta ON + ta.id_type = a_1.id_type + AND ta.type_code = 'COM' + GROUP BY + id_synthese + ) sa ON + sa.id_synthese = s.id_synthese + LEFT JOIN ref_nomenclatures.t_nomenclatures n1 ON + s.id_nomenclature_geo_object_nature = n1.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n2 ON + s.id_nomenclature_grp_typ = n2.id_nomenclature 
+ LEFT JOIN ref_nomenclatures.t_nomenclatures n3 ON + s.id_nomenclature_obs_technique = n3.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n5 ON + s.id_nomenclature_bio_status = n5.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n6 ON + s.id_nomenclature_bio_condition = n6.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n7 ON + s.id_nomenclature_naturalness = n7.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n8 ON + s.id_nomenclature_exist_proof = n8.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n9 ON + s.id_nomenclature_diffusion_level = n9.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n10 ON + s.id_nomenclature_life_stage = n10.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n11 ON + s.id_nomenclature_sex = n11.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n12 ON + s.id_nomenclature_obj_count = n12.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n13 ON + s.id_nomenclature_type_count = n13.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n14 ON + s.id_nomenclature_sensitivity = n14.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n15 ON + s.id_nomenclature_observation_status = n15.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n16 ON + s.id_nomenclature_blurring = n16.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n17 ON + s.id_nomenclature_source_status = n17.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n18 ON + s.id_nomenclature_info_geo_type = n18.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n19 ON + s.id_nomenclature_determination_method = n19.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n20 ON + s.id_nomenclature_behaviour = n20.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n21 ON + s.id_nomenclature_valid_status = n21.id_nomenclature + LEFT JOIN ref_nomenclatures.t_nomenclatures n22 ON + s.id_nomenclature_biogeo_status = n22.id_nomenclature + LEFT JOIN ref_habitats.habref hab ON + hab.cd_hab = s.cd_hab; + """ + ) diff --git a/backend/geonature/migrations/versions/0630b93bcfe0_add_permissions_inherited_modules_objects.py b/backend/geonature/migrations/versions/0630b93bcfe0_add_permissions_inherited_modules_objects.py new file mode 100644 index 0000000000..973ad40ccb --- /dev/null +++ b/backend/geonature/migrations/versions/0630b93bcfe0_add_permissions_inherited_modules_objects.py @@ -0,0 +1,645 @@ +"""add permissions inherited modules objects + +Add rows in `gn_permissions.cor_role_action_filter_module_object` to keep the permissions that were previously +inherited from the module "GEONATURE" and the object "ALL". +Necessary with the removal of modules and objects inheritance in the function used to get permissions. + +Revision ID: 0630b93bcfe0 +Revises: cf1c1fdbde77 +Create Date: 2023-04-13 14:24:21.124669 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "0630b93bcfe0" +down_revision = "df5a5099e084" +branch_labels = None +depends_on = None + + +def upgrade(): + """ + Add the permissions that were computed by modules/objects inheritance + Remove the 'scope 0' filter + + Create backup tables for the permissions, filters types and filters. 
+ """ + + """ + Backup permissions and filters in order to be able to downgrade + """ + # Backup permissions + # include Foreign Keys, so that restore is eventually not broken by update/removal + # of any referenced role, action, filter, module or object + op.execute( + """ + create or replace function create_backup_table(source_table text, new_table text) + returns void language plpgsql + as $$ + declare + rec record; + begin + execute format( + 'create table %s (like %s including constraints including identity including indexes)', + new_table, source_table); + for rec in + select oid, conname + from pg_constraint + where contype = 'f' + and conrelid = source_table::regclass + loop + execute format( + 'alter table %s add constraint %s %s', + new_table, + 'backup_' || rec.conname, + pg_get_constraintdef(rec.oid)); + end loop; + execute format( + 'insert into %s select * from %s', + new_table, source_table); + end $$; + """ + ) + op.execute( + """ + SELECT + create_backup_table( + 'gn_permissions.cor_role_action_filter_module_object', + 'gn_permissions.backup_cor_role_action_filter_module_object' + ); + """ + ) + op.execute( + """ + SELECT + create_backup_table( + 'gn_permissions.t_filters', + 'gn_permissions.backup_t_filters' + ); + """ + ) + op.execute( + """ + SELECT + create_backup_table( + 'gn_permissions.bib_filters_type', + 'gn_permissions.backup_bib_filters_type' + ); + """ + ) + op.execute( + """ + DROP FUNCTION create_backup_table; + """ + ) + # Associate FK 'fk_backup_cor_r_a_f_m_o_id_filter' to 'backup_t_filters' instead of 't_filters' + # to be able to delete scope 0 from 't_filters' while keeping the associated rows in backup of permissions + # ON DELETE and ON UPDATE rules should not matter, as no modification from 'backup_t_filters' is expected + op.drop_constraint( + "backup_fk_cor_r_a_f_m_o_id_filter", + table_name="backup_cor_role_action_filter_module_object", + schema="gn_permissions", + ) + op.create_foreign_key( + "backup_fk_cor_r_a_f_m_o_id_filter", + source_schema="gn_permissions", + source_table="backup_cor_role_action_filter_module_object", + local_cols=["id_filter"], + referent_schema="gn_permissions", + referent_table="backup_t_filters", + remote_cols=["id_filter"], + onupdate=None, + ondelete="CASCADE", + ) + op.drop_constraint( + "backup_fk_t_filters_id_filter_type", + table_name="backup_t_filters", + schema="gn_permissions", + ) + op.create_foreign_key( + "backup_fk_t_filters_id_filter_type", + source_schema="gn_permissions", + source_table="backup_t_filters", + local_cols=["id_filter_type"], + referent_schema="gn_permissions", + referent_table="backup_bib_filters_type", + remote_cols=["id_filter_type"], + onupdate=None, + ondelete="CASCADE", + ) + + """ + Remove permissions with filters which are not of SCOPE type + """ + op.execute( + """ + DELETE FROM + gn_permissions.cor_role_action_filter_module_object p + USING + gn_permissions.t_filters f, + gn_permissions.bib_filters_type t + WHERE + p.id_filter = f.id_filter + AND + f.id_filter_type = t.id_filter_type + AND + t.code_filter_type != 'SCOPE' + """ + ) + op.execute( + """ + DELETE FROM + gn_permissions.t_filters f + USING + gn_permissions.bib_filters_type t + WHERE + f.id_filter_type = t.id_filter_type + AND + t.code_filter_type != 'SCOPE' + """ + ) + op.execute( + """ + DELETE FROM + gn_permissions.bib_filters_type t + WHERE + t.code_filter_type != 'SCOPE' + """ + ) + """ + Thereafter, all permissions are of SCOPE type without requiring verification + """ + + """ + Modules inheritance: + + Inherit 
permissions (id_role, id_action, id_filter, _ , id_object) from module GN to other modules + when a permission (id_role, id_action, _ , _ , id_object) is not already specified for the other module. + """ + # For permissions associated to roles which are users, and roles which are groups with no corresponding user that + # have a scope restriction for the permission : + # by 'scope restriction' we mean a case where the permission to be inserted is + # (id_role_group, id_action, id_filter_group, id_module, id_object), with id_filter_group of type 'SCOPE', + # where the role id_role_user is associated to the role id_role_group and where there exists a permission + # (id_role_user, id_action, id_filter_user, id_module, id_object) with id_filter_user < id_filter_group). + op.execute( + """ + INSERT + INTO + gn_permissions.cor_role_action_filter_module_object + (id_role, + id_action, + id_filter, + id_module, + id_object) + SELECT + perm_gn.id_role, + perm_gn.id_action, + perm_gn.id_filter, + module_other.id_module, + perm_gn.id_object + FROM + gn_commons.t_modules module_other, + gn_permissions.cor_role_action_filter_module_object perm_gn + JOIN gn_commons.t_modules module_gn ON perm_gn.id_module = module_gn.id_module + JOIN utilisateurs.t_roles r ON perm_gn.id_role = r.id_role + JOIN gn_permissions.t_filters filter_scope_group ON perm_gn.id_filter = filter_scope_group.id_filter + WHERE + module_gn.module_code = 'GEONATURE' + AND + module_other.module_code != 'GEONATURE' + AND + ( + r.groupe IS FALSE + OR + NOT EXISTS + ( + SELECT + NULL + FROM + gn_permissions.cor_role_action_filter_module_object restriction_perm_exists + JOIN utilisateurs.cor_roles association_group_user + ON restriction_perm_exists.id_role = association_group_user.id_role_utilisateur + AND perm_gn.id_role = association_group_user.id_role_groupe + JOIN gn_permissions.t_filters filter_scope_user + ON restriction_perm_exists.id_filter = filter_scope_user.id_filter + WHERE + restriction_perm_exists.id_action = perm_gn.id_action + AND + restriction_perm_exists.id_module = module_other.id_module + AND + restriction_perm_exists.id_object = perm_gn.id_object + AND + filter_scope_user.value_filter::INTEGER < filter_scope_group.value_filter::INTEGER + ) + ) + AND + NOT EXISTS + ( + SELECT + NULL + FROM + gn_permissions.cor_role_action_filter_module_object perm_exists + WHERE + perm_exists.id_role = perm_gn.id_role + AND + perm_exists.id_action = perm_gn.id_action + AND + perm_exists.id_module = module_other.id_module + AND + perm_exists.id_object = perm_gn.id_object + ) + ; + """ + ) + # For permissions associated to roles which are groups with at least one corresponding user that have a scope + # restriction for the permission + op.execute( + """ + INSERT + INTO + gn_permissions.cor_role_action_filter_module_object + (id_role, + id_action, + id_filter, + id_module, + id_object) + SELECT + user_other.id_role, + perm_gn.id_action, + perm_gn.id_filter, + module_other.id_module, + perm_gn.id_object + FROM + gn_commons.t_modules module_other, + gn_permissions.cor_role_action_filter_module_object perm_gn + JOIN gn_commons.t_modules module_gn + ON perm_gn.id_module = module_gn.id_module + JOIN utilisateurs.t_roles role_groupe + ON perm_gn.id_role = role_groupe.id_role + JOIN gn_permissions.t_filters filter_scope_group + ON perm_gn.id_filter = filter_scope_group.id_filter, + utilisateurs.t_roles user_other + JOIN utilisateurs.cor_roles association_group_other_user + ON user_other.id_role = association_group_other_user.id_role_utilisateur + 
WHERE + module_gn.module_code = 'GEONATURE' + AND + module_other.module_code != 'GEONATURE' + AND + perm_gn.id_role = association_group_other_user.id_role_groupe + AND + ( + role_groupe.groupe IS TRUE + AND + EXISTS + ( + SELECT + restriction_perm_exists.id_role AS id_user_with_restriction + FROM + gn_permissions.cor_role_action_filter_module_object restriction_perm_exists + JOIN utilisateurs.cor_roles association_group_user + ON restriction_perm_exists.id_role = association_group_user.id_role_utilisateur + AND perm_gn.id_role = association_group_user.id_role_groupe + JOIN gn_permissions.t_filters filter_scope_user + ON restriction_perm_exists.id_filter = filter_scope_user.id_filter + WHERE + restriction_perm_exists.id_action = perm_gn.id_action + AND + restriction_perm_exists.id_module = module_other.id_module + AND + restriction_perm_exists.id_object = perm_gn.id_object + AND + filter_scope_user.value_filter::INTEGER < filter_scope_group.value_filter::INTEGER + ) + ) + AND + NOT EXISTS + ( + SELECT + NULL + FROM + gn_permissions.cor_role_action_filter_module_object perm_group_exists + WHERE + perm_group_exists.id_role = perm_gn.id_role + AND + perm_group_exists.id_action = perm_gn.id_action + AND + perm_group_exists.id_module = module_other.id_module + AND + perm_group_exists.id_object = perm_gn.id_object + ) + AND + NOT EXISTS + ( + SELECT + NULL + FROM + gn_permissions.cor_role_action_filter_module_object perm_user_exists + WHERE + perm_user_exists.id_role = user_other.id_role + AND + perm_user_exists.id_action = perm_gn.id_action + AND + perm_user_exists.id_module = module_other.id_module + AND + perm_user_exists.id_object = perm_gn.id_object + ) + ; + """ + ) + + """ + Objects inheritance: + + Inherit permissions (id_role, id_action, id_filter, id_module, id_object("ALL")) from object ALL to other objects + when the other object is associated to the module id_module in the `gn_permissions.cor_object_module` + and when a permission (id_role, id_action, _ , id_module, _ ) is not already specified for the other object. 
+ """ + # For permissions associated to roles which are users, and roles which are groups with no corresponding user that + # have a scope restriction for the permission : + op.execute( + """ + INSERT + INTO + gn_permissions.cor_role_action_filter_module_object + (id_role, + id_action, + id_filter, + id_module, + id_object) + SELECT + perm_object_all.id_role, + perm_object_all.id_action, + perm_object_all.id_filter, + perm_object_all.id_module, + object_other.id_object + FROM + gn_permissions.cor_role_action_filter_module_object perm_object_all + JOIN gn_permissions.t_objects object_all + ON perm_object_all.id_object = object_all.id_object + JOIN gn_permissions.cor_object_module cor_object_module + ON cor_object_module.id_module = perm_object_all.id_module + JOIN gn_permissions.t_objects object_other + ON object_other.id_object = cor_object_module.id_object + JOIN utilisateurs.t_roles r + ON perm_object_all.id_role = r.id_role + JOIN gn_permissions.t_filters filter_scope_group + ON perm_object_all.id_filter = filter_scope_group.id_filter + WHERE + object_all.code_object = 'ALL' + AND + object_other.code_object != 'ALL' + AND + ( + r.groupe IS FALSE + OR + NOT EXISTS + ( + SELECT + NULL + FROM + gn_permissions.cor_role_action_filter_module_object restriction_perm_exists + JOIN utilisateurs.cor_roles association_group_user + ON restriction_perm_exists.id_role = association_group_user.id_role_utilisateur + AND perm_object_all.id_role = association_group_user.id_role_groupe + JOIN gn_permissions.t_filters filter_scope_user + ON restriction_perm_exists.id_filter = filter_scope_user.id_filter + WHERE + restriction_perm_exists.id_action = perm_object_all.id_action + AND + restriction_perm_exists.id_module = perm_object_all.id_module + AND + restriction_perm_exists.id_object = object_other.id_object + AND + filter_scope_user.value_filter::INTEGER < filter_scope_group.value_filter::INTEGER + ) + ) + AND + NOT EXISTS + ( + SELECT + NULL + FROM + gn_permissions.cor_role_action_filter_module_object perm_exists + WHERE + perm_exists.id_role = perm_object_all.id_role + AND + perm_exists.id_action = perm_object_all.id_action + AND + perm_exists.id_module = perm_object_all.id_module + AND + perm_exists.id_object = object_other.id_object + ) + ; + """ + ) + # For permissions associated to roles which are groups with at least one corresponding user that have a scope + # restriction for the permission + op.execute( + """ + INSERT + INTO + gn_permissions.cor_role_action_filter_module_object + (id_role, + id_action, + id_filter, + id_module, + id_object) + SELECT + user_other.id_role, + perm_object_all.id_action, + perm_object_all.id_filter, + perm_object_all.id_module, + object_other.id_object + FROM + gn_permissions.cor_role_action_filter_module_object perm_object_all + JOIN gn_permissions.t_objects object_all + ON perm_object_all.id_object = object_all.id_object + JOIN gn_permissions.cor_object_module cor_object_module + ON cor_object_module.id_module = perm_object_all.id_module + JOIN gn_permissions.t_objects object_other + ON object_other.id_object = cor_object_module.id_object + JOIN utilisateurs.t_roles role_groupe + ON perm_object_all.id_role = role_groupe.id_role + JOIN gn_permissions.t_filters filter_scope_group + ON perm_object_all.id_filter = filter_scope_group.id_filter, + utilisateurs.t_roles user_other + JOIN utilisateurs.cor_roles association_group_other_user + ON user_other.id_role = association_group_other_user.id_role_utilisateur + WHERE + object_all.code_object = 'ALL' + AND + 
object_other.code_object != 'ALL' + AND + perm_object_all.id_role = association_group_other_user.id_role_groupe + AND + ( + role_groupe.groupe IS TRUE + AND + EXISTS + ( + SELECT + restriction_perm_exists.id_role AS id_user_with_restriction + FROM + gn_permissions.cor_role_action_filter_module_object restriction_perm_exists + JOIN utilisateurs.cor_roles association_group_user + ON restriction_perm_exists.id_role = association_group_user.id_role_utilisateur + AND perm_object_all.id_role = association_group_user.id_role_groupe + JOIN gn_permissions.t_filters filter_scope_user + ON restriction_perm_exists.id_filter = filter_scope_user.id_filter + WHERE + restriction_perm_exists.id_action = perm_object_all.id_action + AND + restriction_perm_exists.id_module = perm_object_all.id_module + AND + restriction_perm_exists.id_object = object_other.id_object + AND + filter_scope_user.value_filter::INTEGER < filter_scope_group.value_filter::INTEGER + ) + ) + AND + NOT EXISTS + ( + SELECT + NULL + FROM + gn_permissions.cor_role_action_filter_module_object perm_group_exists + WHERE + perm_group_exists.id_role = perm_object_all.id_role + AND + perm_group_exists.id_action = perm_object_all.id_action + AND + perm_group_exists.id_module = perm_object_all.id_module + AND + perm_group_exists.id_object = object_other.id_object + ) + AND + NOT EXISTS + ( + SELECT + NULL + FROM + gn_permissions.cor_role_action_filter_module_object perm_user_exists + WHERE + perm_user_exists.id_role = user_other.id_role + AND + perm_user_exists.id_action = perm_object_all.id_action + AND + perm_user_exists.id_module = perm_object_all.id_module + AND + perm_user_exists.id_object = object_other.id_object + ) + ; + """ + ) + + """ + Remove scope '0' + """ + # Remove associated permissions + op.execute( + """ + DELETE FROM + gn_permissions.cor_role_action_filter_module_object p + WHERE + id_filter = ( + SELECT + f.id_filter + FROM + gn_permissions.t_filters f + JOIN + gn_permissions.bib_filters_type t USING (id_filter_type) + WHERE + t.code_filter_type = 'SCOPE' + AND + f.value_filter = '0' + ) + """ + ) + # Remove filter + # we assume that we have 'ON DELETE NO ACTION' for 'fk_cor_r_a_f_m_o_id_filter', + # and thus for 'fk_backup_cor_r_a_f_m_o_id_filter, and that removal of the filter for 'scope 0' will not + # lead, by cascade, to a suppression of the associated permissions in the backup table of permissions. 
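A short recap of why this DELETE is safe, as comments (assumed semantics, restating the constraint definitions created earlier in upgrade()):

    # backup_cor_role_action_filter_module_object.id_filter now references
    # backup_t_filters (ON DELETE CASCADE, entirely inside the backup tables),
    # so removing the scope-0 row from the live gn_permissions.t_filters
    # cannot cascade into the backed-up permissions.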
+ op.execute( + """ + DELETE FROM + gn_permissions.t_filters + WHERE + id_filter_type = ( + SELECT id_filter_type + FROM gn_permissions.bib_filters_type + WHERE code_filter_type = 'SCOPE' + ) + AND + value_filter = '0' + """ + ) + + +def downgrade(): + """ + Restore the permissions and filters from backup + """ + + # First clear existing data + op.execute( + """ + DELETE FROM gn_permissions.cor_role_action_filter_module_object; + DELETE FROM gn_permissions.t_filters; + DELETE FROM gn_permissions.bib_filters_type; + """ + ) + + """ + Restore filters types from backup table + """ + op.execute( + """ + INSERT INTO gn_permissions.bib_filters_type + (id_filter_type, code_filter_type, label_filter_type, description_filter_type) + SELECT id_filter_type, code_filter_type, label_filter_type, description_filter_type + FROM gn_permissions.backup_bib_filters_type + """ + ) + + """ + Restore filters from backup table + + restores, in particular, the filter for scope 0 + """ + op.execute( + """ + INSERT INTO gn_permissions.t_filters + (id_filter, label_filter, value_filter, description_filter, id_filter_type) + SELECT id_filter, label_filter, value_filter, description_filter, id_filter_type + FROM gn_permissions.backup_t_filters + """ + ) + + """ + Restore permissions from backup table + """ + op.execute( + """ + INSERT INTO gn_permissions.cor_role_action_filter_module_object + (id_permission, id_role, id_action, id_module, id_object, id_filter) + SELECT id_permission, id_role, id_action, id_module, id_object, id_filter + FROM gn_permissions.backup_cor_role_action_filter_module_object; + """ + ) + + """ + Drop backup tables + + drop 'backup_t_filters' after 'backup_cor_role_action_filter_module_object' because of + 'fk_backup_cor_r_a_f_m_o_id_filter' referencing 'backup_t_filters' + """ + op.drop_table( + schema="gn_permissions", table_name="backup_cor_role_action_filter_module_object" + ) + op.drop_table(schema="gn_permissions", table_name="backup_t_filters") + op.drop_table(schema="gn_permissions", table_name="backup_bib_filters_type") diff --git a/backend/geonature/migrations/versions/09a637f06b96_default_notification_rules.py b/backend/geonature/migrations/versions/09a637f06b96_default_notification_rules.py new file mode 100644 index 0000000000..3dda111a23 --- /dev/null +++ b/backend/geonature/migrations/versions/09a637f06b96_default_notification_rules.py @@ -0,0 +1,59 @@ +"""Default notification rules + +Revision ID: 09a637f06b96 +Revises: 4cf3fd5d06f5 +Create Date: 2023-01-13 09:55:53.525869 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "09a637f06b96" +down_revision = "4cf3fd5d06f5" +branch_labels = None +depends_on = None + + +def upgrade(): + # Allows NULL id_role to define default rules + op.alter_column( + schema="gn_notifications", + table_name="t_notifications_rules", + column_name="id_role", + nullable=True, + ) + # Create partial index on (code_method, code_category) where id_role IS NULL. + # This allows to create only one default rule for each method / category couple. 
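For readers less used to partial indexes, an equivalent raw-DDL sketch of the constraint intended below (assumption: the index is meant to be UNIQUE, as the "only one default rule" comment implies; op.create_index() would then need unique=True):

    #   CREATE UNIQUE INDEX un_method_category
    #   ON gn_notifications.t_notifications_rules (code_method, code_category)
    #   WHERE id_role IS NULL;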
+ op.create_index( + schema="gn_notifications", + table_name="t_notifications_rules", + index_name="un_method_category", + columns=["code_method", "code_category"], + postgresql_where=sa.text("id_role IS NULL"), + ) + op.add_column( + schema="gn_notifications", + table_name="t_notifications_rules", + column=sa.Column("subscribed", sa.Boolean, nullable=False, server_default=sa.true()), + ) + + +def downgrade(): + op.drop_column( + schema="gn_notifications", + table_name="t_notifications_rules", + column_name="subscribed", + ) + op.drop_index( + schema="gn_notifications", + table_name="t_notifications_rules", + index_name="un_method_category", + ) + op.alter_column( + schema="gn_notifications", + table_name="t_notifications_rules", + column_name="id_role", + nullable=False, + ) diff --git a/backend/geonature/migrations/versions/0cae32a010ea_lstrip_static_media_from_path.py b/backend/geonature/migrations/versions/0cae32a010ea_lstrip_static_media_from_path.py new file mode 100644 index 0000000000..594218885b --- /dev/null +++ b/backend/geonature/migrations/versions/0cae32a010ea_lstrip_static_media_from_path.py @@ -0,0 +1,62 @@ +"""lstrip static/medias/ from t_medias.media_path + +Revision ID: 0cae32a010ea +Revises: 497f52d996dd +Create Date: 2023-01-25 18:01:06.482391 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "0cae32a010ea" +down_revision = "497f52d996dd" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute( + """ + UPDATE + gn_commons.t_medias + SET + media_path = regexp_replace(media_path, '^static/medias/', '') + WHERE + media_path IS NOT NULL + """ + ) + op.execute( + """ + UPDATE + gn_commons.t_mobile_apps + SET + relative_path_apk = regexp_replace(relative_path_apk, '^static/mobile/', '') + WHERE + relative_path_apk IS NOT NULL + """ + ) + + +def downgrade(): + op.execute( + """ + UPDATE + gn_commons.t_medias + SET + media_path = 'static/medias/' || media_path + WHERE + media_path IS NOT NULL + """ + ) + op.execute( + """ + UPDATE + gn_commons.t_mobile_apps + SET + relative_path_apk = 'static/mobile/' || relative_path_apk + WHERE + relative_path_apk IS NOT NULL + """ + ) diff --git a/backend/geonature/migrations/versions/36d0bd313a47_add_notification_schema_tables_and_data.py b/backend/geonature/migrations/versions/36d0bd313a47_add_notification_schema_tables_and_data.py index 8c898859f2..efd19159d8 100644 --- a/backend/geonature/migrations/versions/36d0bd313a47_add_notification_schema_tables_and_data.py +++ b/backend/geonature/migrations/versions/36d0bd313a47_add_notification_schema_tables_and_data.py @@ -22,7 +22,6 @@ def upgrade(): - # Create new schema logger.info("Create schema " + SCHEMA_NAME) op.execute("CREATE SCHEMA " + SCHEMA_NAME) @@ -163,6 +162,5 @@ def upgrade(): def downgrade(): - logger.info("DROP SCHEMA " + SCHEMA_NAME + " CASCADE") op.execute("DROP SCHEMA " + SCHEMA_NAME + " CASCADE") diff --git a/backend/geonature/migrations/versions/497f52d996dd_additional_fields_remove_columns.py b/backend/geonature/migrations/versions/497f52d996dd_additional_fields_remove_columns.py new file mode 100644 index 0000000000..8176b1af89 --- /dev/null +++ b/backend/geonature/migrations/versions/497f52d996dd_additional_fields_remove_columns.py @@ -0,0 +1,47 @@ +"""TAdditionalFields : remove useless columns and values + +Revision ID: 497f52d996dd +Revises: 4cf3fd5d06f5 +Create Date: 2023-01-04 16:02:45.953579 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by 
Alembic. +revision = "497f52d996dd" +down_revision = "8888e5cce63b" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute( + """ + ALTER TABLE gn_commons.t_additional_fields + DROP COLUMN key_label; + ALTER TABLE gn_commons.t_additional_fields + DROP COLUMN key_value; + """ + ) + op.execute( + """ + UPDATE gn_commons.t_additional_fields + SET id_widget = (SELECT id_widget FROM gn_commons.bib_widgets WHERE widget_name = 'radio') + WHERE id_widget = (SELECT id_widget FROM gn_commons.bib_widgets WHERE widget_name = 'bool_radio') + """ + ) + op.execute("DELETE FROM gn_commons.bib_widgets WHERE widget_name = 'bool_radio'") + + +def downgrade(): + op.execute( + """ + ALTER TABLE gn_commons.t_additional_fields + ADD COLUMN key_label varchar(255); + ALTER TABLE gn_commons.t_additional_fields + ADD COLUMN key_value varchar(255); + """ + ) + op.execute("INSERT INTO gn_commons.bib_widgets(widget_name) VALUES ('bool_radio')") diff --git a/backend/geonature/migrations/versions/5d65f9c93a32_fix_permissions_view.py b/backend/geonature/migrations/versions/5d65f9c93a32_fix_permissions_view.py new file mode 100644 index 0000000000..140133a23a --- /dev/null +++ b/backend/geonature/migrations/versions/5d65f9c93a32_fix_permissions_view.py @@ -0,0 +1,183 @@ +"""fix permissions view + +Revision ID: 5d65f9c93a32 +Revises: 0cae32a010ea +Create Date: 2023-02-20 17:49:03.156681 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "5d65f9c93a32" +down_revision = "0cae32a010ea" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute( + """ + CREATE OR REPLACE VIEW gn_permissions.v_roles_permissions + AS WITH direct_permissions AS ( + -- User and group direct permissions + SELECT u.id_role, + u.nom_role, + u.prenom_role, + u.groupe, + u.id_organisme, + c_1.id_action, + c_1.id_filter, + c_1.id_module, + c_1.id_object, + c_1.id_permission + FROM utilisateurs.t_roles u + JOIN gn_permissions.cor_role_action_filter_module_object c_1 ON c_1.id_role = u.id_role + ), inherited_permissions AS ( + -- User permissions inherited from group + SELECT u.id_role, + u.nom_role, + u.prenom_role, + u.groupe, + u.id_organisme, + c_1.id_action, + c_1.id_filter, + c_1.id_module, + c_1.id_object, + c_1.id_permission + FROM utilisateurs.t_roles u + JOIN utilisateurs.cor_roles ug ON ug.id_role_utilisateur = u.id_role + JOIN gn_permissions.cor_role_action_filter_module_object c_1 ON c_1.id_role = ug.id_role_groupe + ), all_permissions AS ( + SELECT id_role, + nom_role, + prenom_role, + groupe, + id_organisme, + id_action, + id_filter, + id_module, + id_object, + id_permission + FROM direct_permissions + UNION + SELECT id_role, + nom_role, + prenom_role, + groupe, + id_organisme, + id_action, + id_filter, + id_module, + id_object, + id_permission + FROM inherited_permissions + ) + SELECT v.id_role, + v.nom_role, + v.prenom_role, + v.id_organisme, + v.id_module, + modules.module_code, + obj.code_object, + v.id_action, + v.id_filter, + actions.code_action, + actions.description_action, + filters.value_filter, + filters.label_filter, + filter_type.code_filter_type, + filter_type.id_filter_type, + v.id_permission + FROM all_permissions v + JOIN gn_permissions.t_actions actions ON actions.id_action = v.id_action + JOIN gn_permissions.t_filters filters ON filters.id_filter = v.id_filter + JOIN gn_permissions.t_objects obj ON obj.id_object = v.id_object + JOIN gn_permissions.bib_filters_type filter_type ON filters.id_filter_type = 
filter_type.id_filter_type + JOIN gn_commons.t_modules modules ON modules.id_module = v.id_module; + """ + ) + + +def downgrade(): + op.execute( + """ + CREATE OR REPLACE VIEW gn_permissions.v_roles_permissions + AS WITH p_user_permission AS ( + SELECT u.id_role, + u.nom_role, + u.prenom_role, + u.groupe, + u.id_organisme, + c_1.id_action, + c_1.id_filter, + c_1.id_module, + c_1.id_object, + c_1.id_permission + FROM utilisateurs.t_roles u + JOIN gn_permissions.cor_role_action_filter_module_object c_1 ON c_1.id_role = u.id_role + WHERE u.groupe = false + ), p_groupe_permission AS ( + SELECT u.id_role, + u.nom_role, + u.prenom_role, + u.groupe, + u.id_organisme, + c_1.id_action, + c_1.id_filter, + c_1.id_module, + c_1.id_object, + c_1.id_permission + FROM utilisateurs.t_roles u + JOIN utilisateurs.cor_roles g ON g.id_role_utilisateur = u.id_role OR g.id_role_groupe = u.id_role + JOIN gn_permissions.cor_role_action_filter_module_object c_1 ON c_1.id_role = g.id_role_groupe + ), all_user_permission AS ( + SELECT p_user_permission.id_role, + p_user_permission.nom_role, + p_user_permission.prenom_role, + p_user_permission.groupe, + p_user_permission.id_organisme, + p_user_permission.id_action, + p_user_permission.id_filter, + p_user_permission.id_module, + p_user_permission.id_object, + p_user_permission.id_permission + FROM p_user_permission + UNION + SELECT p_groupe_permission.id_role, + p_groupe_permission.nom_role, + p_groupe_permission.prenom_role, + p_groupe_permission.groupe, + p_groupe_permission.id_organisme, + p_groupe_permission.id_action, + p_groupe_permission.id_filter, + p_groupe_permission.id_module, + p_groupe_permission.id_object, + p_groupe_permission.id_permission + FROM p_groupe_permission + ) + SELECT v.id_role, + v.nom_role, + v.prenom_role, + v.id_organisme, + v.id_module, + modules.module_code, + obj.code_object, + v.id_action, + v.id_filter, + actions.code_action, + actions.description_action, + filters.value_filter, + filters.label_filter, + filter_type.code_filter_type, + filter_type.id_filter_type, + v.id_permission + FROM all_user_permission v + JOIN gn_permissions.t_actions actions ON actions.id_action = v.id_action + JOIN gn_permissions.t_filters filters ON filters.id_filter = v.id_filter + JOIN gn_permissions.t_objects obj ON obj.id_object = v.id_object + JOIN gn_permissions.bib_filters_type filter_type ON filters.id_filter_type = filter_type.id_filter_type + JOIN gn_commons.t_modules modules ON modules.id_module = v.id_module; + """ + ) diff --git a/backend/geonature/migrations/versions/7fe46b0e4729_multiple_filters_per_permission.py b/backend/geonature/migrations/versions/7fe46b0e4729_multiple_filters_per_permission.py new file mode 100644 index 0000000000..7566a7f027 --- /dev/null +++ b/backend/geonature/migrations/versions/7fe46b0e4729_multiple_filters_per_permission.py @@ -0,0 +1,331 @@ +"""multiple filters per permission + +Revision ID: 7fe46b0e4729 +Revises: cf1c1fdbde77 +Create Date: 2023-04-12 14:38:44.788935 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy import Column, ForeignKey, Integer, Unicode +from sqlalchemy.types import ARRAY + + +# revision identifiers, used by Alembic. 
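As a plain-Python reading of the v_roles_permissions view defined above (a deliberate simplification: real rows carry action, filter, module and object ids):

    # A role's effective permissions are its direct rows plus the rows of every
    # group it belongs to (utilisateurs.cor_roles); the UNION deduplicates.
    def effective_permissions(role, direct, memberships):
        perms = set(direct.get(role, ()))
        for group in memberships.get(role, ()):
            perms |= set(direct.get(group, ()))
        return perms

    direct = {"alice": {("OCCTAX", "R")}, "grp_admin": {("GEONATURE", "R")}}
    memberships = {"alice": ["grp_admin"]}
    assert effective_permissions("alice", direct, memberships) == {
        ("OCCTAX", "R"),
        ("GEONATURE", "R"),
    }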
+revision = "7fe46b0e4729" +down_revision = "0630b93bcfe0" +branch_labels = None +depends_on = None + + +def upgrade(): + # Remove unused table + op.drop_table(schema="gn_permissions", table_name="cor_filter_type_module") + + # Rename tables with better names + op.rename_table( + schema="gn_permissions", old_table_name="t_actions", new_table_name="bib_actions" + ) + op.rename_table( + schema="gn_permissions", + old_table_name="cor_role_action_filter_module_object", + new_table_name="t_permissions", + ) + + # Remove triggers + op.execute("DROP TRIGGER tri_check_no_multiple_scope_perm ON gn_permissions.t_permissions") + op.execute("DROP FUNCTION gn_permissions.fct_tri_does_user_have_already_scope_filter()") + + # Remove SCOPE '3' as equivalent to no filters + op.alter_column( + schema="gn_permissions", + table_name="t_permissions", + column_name="id_filter", + nullable=True, + ) + op.execute( + """ + UPDATE + gn_permissions.t_permissions + SET + id_filter = NULL + WHERE + id_filter = ( + SELECT + f.id_filter + FROM + gn_permissions.t_filters f + JOIN + gn_permissions.bib_filters_type t USING (id_filter_type) + WHERE + t.code_filter_type = 'SCOPE' + AND + f.value_filter = '3' + ) + """ + ) + op.execute( + """ + DELETE FROM + gn_permissions.t_filters + WHERE + id_filter_type = (SELECT id_filter_type FROM gn_permissions.bib_filters_type WHERE code_filter_type = 'SCOPE') + AND + value_filter = '3' + """ + ) + + # Migrate t_permissions.id_filter to t_permissions.filter_scope + op.create_table( + "bib_filters_scope", + Column("value", Integer, primary_key=True), + Column("label", Unicode), + Column("description", Unicode), + schema="gn_permissions", + ) + op.execute( + """ + INSERT INTO + gn_permissions.bib_filters_scope ( + value, + label, + description + ) + SELECT + f.value_filter::int, + f.label_filter, + f.description_filter + FROM + gn_permissions.t_filters f + JOIN + gn_permissions.bib_filters_type t USING (id_filter_type) + WHERE + t.code_filter_type = 'SCOPE' + """ + ) + op.add_column( + schema="gn_permissions", + table_name="t_permissions", + column=Column( + "scope_value", + Integer, + ForeignKey("gn_permissions.bib_filters_scope.value"), + nullable=True, + ), + ) + op.execute( + """ + UPDATE + gn_permissions.t_permissions p + SET + scope_value = s.value + FROM + gn_permissions.t_filters f + JOIN + gn_permissions.bib_filters_scope s ON s.value = f.value_filter::int + JOIN + gn_permissions.bib_filters_type t ON t.id_filter_type = f.id_filter_type + WHERE + t.code_filter_type = 'SCOPE' + AND + p.id_filter = f.id_filter + """ + ) + op.drop_column(schema="gn_permissions", table_name="t_permissions", column_name="id_filter") + op.drop_table(schema="gn_permissions", table_name="t_filters") + + +def downgrade(): + op.create_table( + "t_filters", + Column("id_filter", Integer, primary_key=True), + Column("label_filter", Unicode(255)), + Column("value_filter", Unicode), + Column("description_filter", Unicode), + Column( + "id_filter_type", + Integer, + ForeignKey( + "gn_permissions.bib_filters_type.id_filter_type", + name="fk_t_filters_id_filter_type", + ), + ), + schema="gn_permissions", + ) + op.execute( + """ + INSERT INTO + gn_permissions.t_filters ( + label_filter, + value_filter, + description_filter, + id_filter_type + ) + SELECT + s.label, + s.value::varchar, + s.description, + t.id_filter_type + FROM + gn_permissions.bib_filters_scope s + JOIN + gn_permissions.bib_filters_type t ON t.code_filter_type = 'SCOPE' + """ + ) + op.add_column( + schema="gn_permissions", + 
table_name="t_permissions", + column=Column( + "id_filter", + Integer, + ForeignKey("gn_permissions.t_filters.id_filter", name="fk_cor_r_a_f_m_o_id_filter"), + nullable=True, + ), + ) + # Copy scope_value into id_filter + op.execute( + """ + UPDATE + gn_permissions.t_permissions p + SET + id_filter = f.id_filter + FROM ( + SELECT + p.id_permission, + f.id_filter + FROM + gn_permissions.t_permissions p + JOIN + gn_permissions.bib_filters_scope s ON p.scope_value = s.value + JOIN + gn_permissions.t_filters f ON f.value_filter::int = s.value + JOIN + gn_permissions.bib_filters_type t ON t.id_filter_type = f.id_filter_type + WHERE + t.code_filter_type = 'SCOPE' + AND + p.scope_value IS NOT NULL + ) f + WHERE + p.id_permission = f.id_permission + """ + ) + op.drop_column(schema="gn_permissions", table_name="t_permissions", column_name="scope_value") + op.drop_table(schema="gn_permissions", table_name="bib_filters_scope") + # Set SCOPE=3 for permissions without any filters + op.execute( + """ + INSERT INTO + gn_permissions.t_filters (id_filter_type, label_filter, value_filter, description_filter) + VALUES ( + (SELECT id_filter_type FROM gn_permissions.bib_filters_type WHERE code_filter_type = 'SCOPE'), + 'Toutes les données', + '3', + 'Toutes les données' + ) + """ + ) + op.execute( + """ + UPDATE + gn_permissions.t_permissions p + SET + id_filter = ( + SELECT + f.id_filter + FROM + gn_permissions.t_filters f + JOIN + gn_permissions.bib_filters_type t USING (id_filter_type) + WHERE + t.code_filter_type = 'SCOPE' + AND + f.value_filter = '3' + ) + WHERE + id_filter IS NULL + """ + ) + op.alter_column( + schema="gn_permissions", + table_name="t_permissions", + column_name="id_filter", + nullable=False, + ) + # op.drop_table(schema="gn_permissions", table_name="t_filters") + op.execute( + """ + CREATE FUNCTION gn_permissions.fct_tri_does_user_have_already_scope_filter() + RETURNS trigger + LANGUAGE plpgsql + AS $function$ + -- Check if a role has already a SCOPE permission for an action/module/object + -- use in constraint to force not set multiple scope permission on the same action/module/object + DECLARE + the_code_filter_type character varying; + the_nb_permission integer; + BEGIN + SELECT INTO the_code_filter_type bib.code_filter_type + FROM gn_permissions.t_filters f + JOIN gn_permissions.bib_filters_type bib ON bib.id_filter_type = f.id_filter_type + WHERE f.id_filter = NEW.id_filter + ; + -- if the filter type is NOT SCOPE, its OK to set multiple permissions + IF the_code_filter_type != 'SCOPE' THEN + RETURN NEW; + -- if the new filter is 'SCOPE TYPE', check if there is not already a permission for this + -- action/module/object/role + ELSE + SELECT INTO the_nb_permission count(perm.id_permission) + FROM gn_permissions.cor_role_action_filter_module_object perm + JOIN gn_permissions.t_filters f ON f.id_filter = perm.id_filter + JOIN gn_permissions.bib_filters_type bib ON bib.id_filter_type = f.id_filter_type AND bib.code_filter_type = 'SCOPE' + WHERE id_role=NEW.id_role AND id_action=NEW.id_action AND id_module=NEW.id_module AND id_object=NEW.id_object; + + -- if its an insert 0 row must be present, if its an update 1 row must be present + IF(TG_OP = 'INSERT' AND the_nb_permission = 0) OR (TG_OP = 'UPDATE' AND the_nb_permission = 1) THEN + RETURN NEW; + END IF; + BEGIN + RAISE EXCEPTION 'ATTENTION: il existe déjà un enregistrement de type SCOPE pour le role % l''action % sur le module % et l''objet % . 
Il est interdit de définir plusieurs portées à un role pour le même action sur un module et un objet', NEW.id_role, NEW.id_action, NEW.id_module, NEW.id_object ; + END; + + + END IF; + + END; + + $function$ + ; + """ + ) + op.execute( + """ + CREATE TRIGGER tri_check_no_multiple_scope_perm + BEFORE INSERT OR UPDATE + ON gn_permissions.t_permissions + FOR EACH ROW + EXECUTE PROCEDURE gn_permissions.fct_tri_does_user_have_already_scope_filter() + """ + ) + op.rename_table( + schema="gn_permissions", + old_table_name="t_permissions", + new_table_name="cor_role_action_filter_module_object", + ) + op.rename_table( + schema="gn_permissions", old_table_name="bib_actions", new_table_name="t_actions" + ) + op.create_table( + "cor_filter_type_module", + Column( + "id_filter_type", + Integer, + ForeignKey("gn_permissions.bib_filters_type.id_filter_type"), + primary_key=True, + ), + Column( + "id_module", Integer, ForeignKey("gn_commons.t_modules.id_module"), primary_key=True + ), + schema="gn_permissions", + ) diff --git a/backend/geonature/migrations/versions/8279ce74006b_set_default_modules_type.py b/backend/geonature/migrations/versions/8279ce74006b_set_default_modules_type.py new file mode 100644 index 0000000000..ef94f28d0d --- /dev/null +++ b/backend/geonature/migrations/versions/8279ce74006b_set_default_modules_type.py @@ -0,0 +1,56 @@ +"""set modules default type + +Revision ID: 8279ce74006b +Revises: 5d65f9c93a32 +Create Date: 2023-02-22 12:47:26.727855 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "8279ce74006b" +down_revision = "5d65f9c93a32" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute( + """ + UPDATE + gn_commons.t_modules + SET + type = 'base' + WHERE + type IS NULL + """ + ) + op.alter_column( + schema="gn_commons", + table_name="t_modules", + column_name="type", + nullable=False, + server_default="base", + ) + + +def downgrade(): + op.alter_column( + schema="gn_commons", + table_name="t_modules", + column_name="type", + nullable=True, + server_default=None, + ) + op.execute( + """ + UPDATE + gn_commons.t_modules + SET + type = NULL + WHERE + type = 'base' + """ + ) diff --git a/backend/geonature/migrations/versions/8888e5cce63b_add_index_on_id_area_for_table_cor_area_synthese.py b/backend/geonature/migrations/versions/8888e5cce63b_add_index_on_id_area_for_table_cor_area_synthese.py new file mode 100644 index 0000000000..1ee6ae8028 --- /dev/null +++ b/backend/geonature/migrations/versions/8888e5cce63b_add_index_on_id_area_for_table_cor_area_synthese.py @@ -0,0 +1,29 @@ +"""add index on id_area for table cor_area_synthese + +Revision ID: 8888e5cce63b +Revises: 09a637f06b96 +Create Date: 2023-01-18 17:34:54.298323 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = "8888e5cce63b" +down_revision = "09a637f06b96" +branch_labels = None +depends_on = None + +SCHEMA_NAME = "gn_synthese" +TABLE_NAME = "cor_area_synthese" +INDEX_NAME = "i_cor_area_synthese_id_area" +COLUMN_NAME = "id_area" + + +def upgrade(): + op.create_index(INDEX_NAME, schema=SCHEMA_NAME, table_name=TABLE_NAME, columns=[COLUMN_NAME]) + + +def downgrade(): + op.drop_index(INDEX_NAME, schema=SCHEMA_NAME, table_name=TABLE_NAME) diff --git a/backend/geonature/migrations/versions/95acee9f0452_add_comment_notification.py b/backend/geonature/migrations/versions/95acee9f0452_add_comment_notification.py new file mode 100644 index 0000000000..ae78ace0d1 --- /dev/null +++ b/backend/geonature/migrations/versions/95acee9f0452_add_comment_notification.py @@ -0,0 +1,100 @@ +"""add comment notification + +Revision ID: 95acee9f0452 +Revises: 9e9218653d6c +Create Date: 2023-04-06 19:02:39.863972 + +""" +import sqlalchemy as sa +from alembic import op + +from geonature.core.notifications.models import ( + NotificationCategory, + NotificationRule, + NotificationTemplate, +) + +# revision identifiers, used by Alembic. +revision = "95acee9f0452" +down_revision = "e2a94808cf76" +branch_labels = None +depends_on = ("09a637f06b96",) # Geonature Notifications + +CATEGORY_CODE = "OBSERVATION-COMMENT" +EMAIL_CONTENT = ( + "
<p>Bonjour {{ role.nom_complet }} !</p>" + "<p>{{ user.nom_complet }} a commenté l'observation de {{ synthese.nom_cite }} du {{ synthese.meta_create_date.strftime('%d-%m-%Y') }}" + "que vous avez créée ou commentée</p>" + '<p>Vous pouvez y accéder directement <a href="{{ url }}">ici</a></p>' + "<p>Vous recevez cet email automatiquement via le service de notification de GeoNature.</p>
    " +) +DB_CONTENT = ( + "{{ user.nom_complet }} a commenté l'observation de {{ synthese.nom_cite }} du " + "{{ synthese.meta_create_date.strftime('%d-%m-%Y') }} que vous avez créée ou commentée" +) + + +def upgrade(): + bind = op.get_bind() + session = sa.orm.Session(bind=bind) + + # Add category + category = NotificationCategory( + code=CATEGORY_CODE, + label="Nouveau commentaire sur une observation", + description=( + "Se déclenche lorsqu'un nouveau commentaire est ajouté à une de vos observations, ou une observation que vous avez commenté" + ), + ) + + session.add(category) + + for method, content in (("EMAIL", EMAIL_CONTENT), ("DB", DB_CONTENT)): + template = NotificationTemplate(category=category, code_method=method, content=content) + session.add(template) + + session.commit() + + op.execute( + f""" + INSERT INTO + gn_notifications.t_notifications_rules (code_category, code_method) + VALUES + ('{CATEGORY_CODE}', 'DB'), + ('{CATEGORY_CODE}', 'EMAIL') + """ + ) + + +def downgrade(): + bind = op.get_bind() + session = sa.orm.Session(bind=bind) + # Do not use NotificationCategory.query as it is not the same session! + category = ( + session.query(NotificationCategory) + .filter(NotificationCategory.code == CATEGORY_CODE) + .one_or_none() + ) + + if category is not None: + session.query(NotificationRule).filter( + NotificationRule.code_category == category.code + ).delete() + # Since there is no cascade, need to delete template manually + session.query(NotificationTemplate).filter( + NotificationTemplate.code_category == category.code + ).delete() + + session.delete(category) + session.commit() + + op.execute( + f""" + DELETE FROM + gn_notifications.t_notifications_rules + WHERE + code_category = '{CATEGORY_CODE}' + AND + id_role IS NULL + """ + ) diff --git a/backend/geonature/migrations/versions/9e9218653d6c_add_synthese_log_history.py b/backend/geonature/migrations/versions/9e9218653d6c_add_synthese_log_history.py new file mode 100644 index 0000000000..28fe308da0 --- /dev/null +++ b/backend/geonature/migrations/versions/9e9218653d6c_add_synthese_log_history.py @@ -0,0 +1,66 @@ +"""add synthese log history + +Revision ID: 9e9218653d6c +Revises: 0cae32a010ea +Create Date: 2022-04-06 15:39:37.428357 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = "9e9218653d6c" +down_revision = "cec41a6d3a15" +branch_labels = None +depends_on = None + + +def upgrade(): + op.create_table( + "t_log_synthese", + sa.Column("id_synthese", sa.Integer, primary_key=True), + sa.Column("last_action", sa.CHAR(1), nullable=False), + sa.Column("meta_last_action_date", sa.TIMESTAMP, server_default=sa.func.now()), + schema="gn_synthese", + ) + op.execute( + """ + CREATE FUNCTION gn_synthese.fct_tri_log_delete_on_synthese() RETURNS TRIGGER AS + $BODY$ + DECLARE + BEGIN + -- log id/uuid of deleted datas into specific log table + IF (TG_OP = 'DELETE') THEN + INSERT INTO gn_synthese.t_log_synthese + SELECT + o.id_synthese AS id_synthese + , 'D' AS last_action + , now() AS meta_last_action_date + from old_table o + ON CONFLICT (id_synthese) + DO UPDATE SET last_action = 'D', meta_last_action_date = now(); + END IF; + RETURN NULL; + END; + $BODY$ LANGUAGE plpgsql COST 100 + ; + CREATE TRIGGER tri_log_delete_synthese + AFTER DELETE + ON gn_synthese.synthese + REFERENCING OLD TABLE AS old_table + FOR EACH STATEMENT + EXECUTE FUNCTION gn_synthese.fct_tri_log_delete_on_synthese() + ; + """ + ) + + +def downgrade(): + op.execute( + """ + DROP TRIGGER tri_log_delete_synthese ON gn_synthese.synthese; + DROP FUNCTION gn_synthese.fct_tri_log_delete_on_synthese(); + """ + ) + op.drop_table("t_log_synthese", schema="gn_synthese") diff --git a/backend/geonature/migrations/versions/cec41a6d3a15_revome_v_roles_permissions.py b/backend/geonature/migrations/versions/cec41a6d3a15_revome_v_roles_permissions.py new file mode 100644 index 0000000000..36c1add90f --- /dev/null +++ b/backend/geonature/migrations/versions/cec41a6d3a15_revome_v_roles_permissions.py @@ -0,0 +1,104 @@ +"""revome v_roles_permissions + +Revision ID: cec41a6d3a15 +Revises: 05a91edb6796 +Create Date: 2023-02-23 22:21:30.039893 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = "cec41a6d3a15" +down_revision = "05a91edb6796" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute("DROP VIEW gn_permissions.v_roles_permissions") + + +def downgrade(): + op.execute( + """ + CREATE OR REPLACE VIEW gn_permissions.v_roles_permissions + AS WITH direct_permissions AS ( + -- User and group direct permissions + SELECT u.id_role, + u.nom_role, + u.prenom_role, + u.groupe, + u.id_organisme, + c_1.id_action, + c_1.id_filter, + c_1.id_module, + c_1.id_object, + c_1.id_permission + FROM utilisateurs.t_roles u + JOIN gn_permissions.cor_role_action_filter_module_object c_1 ON c_1.id_role = u.id_role + ), inherited_permissions AS ( + -- User permissions inherited from group + SELECT u.id_role, + u.nom_role, + u.prenom_role, + u.groupe, + u.id_organisme, + c_1.id_action, + c_1.id_filter, + c_1.id_module, + c_1.id_object, + c_1.id_permission + FROM utilisateurs.t_roles u + JOIN utilisateurs.cor_roles ug ON ug.id_role_utilisateur = u.id_role + JOIN gn_permissions.cor_role_action_filter_module_object c_1 ON c_1.id_role = ug.id_role_groupe + ), all_permissions AS ( + SELECT id_role, + nom_role, + prenom_role, + groupe, + id_organisme, + id_action, + id_filter, + id_module, + id_object, + id_permission + FROM direct_permissions + UNION + SELECT id_role, + nom_role, + prenom_role, + groupe, + id_organisme, + id_action, + id_filter, + id_module, + id_object, + id_permission + FROM inherited_permissions + ) + SELECT v.id_role, + v.nom_role, + v.prenom_role, + v.id_organisme, + v.id_module, + modules.module_code, + obj.code_object, + v.id_action, + v.id_filter, + actions.code_action, + actions.description_action, + filters.value_filter, + filters.label_filter, + filter_type.code_filter_type, + filter_type.id_filter_type, + v.id_permission + FROM all_permissions v + JOIN gn_permissions.t_actions actions ON actions.id_action = v.id_action + JOIN gn_permissions.t_filters filters ON filters.id_filter = v.id_filter + JOIN gn_permissions.t_objects obj ON obj.id_object = v.id_object + JOIN gn_permissions.bib_filters_type filter_type ON filters.id_filter_type = filter_type.id_filter_type + JOIN gn_commons.t_modules modules ON modules.id_module = v.id_module; + """ + ) diff --git a/backend/geonature/migrations/versions/cf1c1fdbde77_correction_sql_on_delete_module.py b/backend/geonature/migrations/versions/cf1c1fdbde77_correction_sql_on_delete_module.py new file mode 100644 index 0000000000..df8df23c6d --- /dev/null +++ b/backend/geonature/migrations/versions/cf1c1fdbde77_correction_sql_on_delete_module.py @@ -0,0 +1,135 @@ +"""correction-sql-on-delete-module + +Revision ID: cf1c1fdbde77 +Revises: 9e9218653d6c +Create Date: 2023-04-11 11:22:39.603084 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = "cf1c1fdbde77" +down_revision = "9e9218653d6c" +branch_labels = None +depends_on = None + + +def upgrade(): + # Removing rows in 'cor_role_action_filter_module_object' which have non-valid values for 'id_module' field : + # values that are not in 'gn_commons.t_modules' + op.execute( + """ + DELETE + FROM + gn_permissions.cor_role_action_filter_module_object cr + WHERE + NOT EXISTS ( + SELECT + NULL + FROM + gn_commons.t_modules m + WHERE + cr.id_module = m.id_module); + """ + ) + + # Adding FK 'fk_cor_r_a_f_m_o_id_module' + op.execute( + """ + ALTER TABLE gn_permissions.cor_role_action_filter_module_object + ADD CONSTRAINT fk_cor_r_a_f_m_o_id_module FOREIGN KEY (id_module) + REFERENCES gn_commons.t_modules (id_module) + ON DELETE CASCADE + ON UPDATE CASCADE; + """ + ) + + # Modifying FK 'fk_t_base_visits_id_module' + op.execute( + """ + ALTER TABLE gn_monitoring.t_base_visits + DROP CONSTRAINT fk_t_base_visits_id_module; + ALTER TABLE gn_monitoring.t_base_visits + ADD CONSTRAINT fk_t_base_visits_id_module FOREIGN KEY (id_module) + REFERENCES gn_commons.t_modules (id_module) + ON DELETE NO ACTION -- previously 'ON DELETE CASCADE' + ON UPDATE CASCADE; -- previously 'ON UPDATE CASCADE' + """ + ) + + # Modifying FK 'fk_cor_filter_module_id_module' + op.execute( + """ + ALTER TABLE gn_permissions.cor_filter_type_module + DROP CONSTRAINT fk_cor_filter_module_id_module; + ALTER TABLE gn_permissions.cor_filter_type_module + ADD CONSTRAINT fk_cor_filter_module_id_module FOREIGN KEY (id_module) + REFERENCES gn_commons.t_modules (id_module) + ON DELETE CASCADE -- previously 'ON DELETE NO ACTION' + ON UPDATE CASCADE; -- previously 'ON UPDATE CASCADE' + """ + ) + + # Modifying FK 'fk_cor_site_module_id_module' + op.execute( + """ + ALTER TABLE gn_monitoring.cor_site_module + DROP CONSTRAINT fk_cor_site_module_id_module; + ALTER TABLE gn_monitoring.cor_site_module + ADD CONSTRAINT fk_cor_site_module_id_module FOREIGN KEY (id_module) + REFERENCES gn_commons.t_modules (id_module) + ON DELETE CASCADE -- previously 'ON DELETE NO ACTION' + ON UPDATE CASCADE; -- previously 'ON DELETE NO ACTION' + """ + ) + + +def downgrade(): + # Removing FK 'fk_cor_r_a_f_m_o_id_module' + op.execute( + """ + ALTER TABLE gn_permissions.cor_role_action_filter_module_object + DROP CONSTRAINT fk_cor_r_a_f_m_o_id_module; + """ + ) + + # Modifying back FK 'fk_t_base_visits_id_module' + op.execute( + """ + ALTER TABLE gn_monitoring.t_base_visits + DROP CONSTRAINT fk_t_base_visits_id_module; + ALTER TABLE gn_monitoring.t_base_visits + ADD CONSTRAINT fk_t_base_visits_id_module FOREIGN KEY (id_module) + REFERENCES gn_commons.t_modules (id_module) + ON DELETE CASCADE + ON UPDATE CASCADE; + """ + ) + + # Modifying back FK 'fk_cor_filter_module_id_module' + op.execute( + """ + ALTER TABLE gn_permissions.cor_filter_type_module + DROP CONSTRAINT fk_cor_filter_module_id_module; + ALTER TABLE gn_permissions.cor_filter_type_module + ADD CONSTRAINT fk_cor_filter_module_id_module FOREIGN KEY (id_module) + REFERENCES gn_commons.t_modules (id_module) + ON DELETE NO ACTION + ON UPDATE CASCADE; + """ + ) + + # Modifying back FK 'fk_cor_site_module_id_module' + op.execute( + """ + ALTER TABLE gn_monitoring.cor_site_module + DROP CONSTRAINT fk_cor_site_module_id_module; + ALTER TABLE gn_monitoring.cor_site_module + ADD CONSTRAINT fk_cor_site_module_id_module FOREIGN KEY (id_module) + REFERENCES gn_commons.t_modules (id_module) + ON DELETE NO ACTION + ON UPDATE NO ACTION; + """ + ) diff --git 
a/backend/geonature/migrations/versions/df5a5099e084_add_additional_fields_object.py b/backend/geonature/migrations/versions/df5a5099e084_add_additional_fields_object.py new file mode 100644 index 0000000000..44325b3a2f --- /dev/null +++ b/backend/geonature/migrations/versions/df5a5099e084_add_additional_fields_object.py @@ -0,0 +1,60 @@ +"""add additional fields object + +Revision ID: df5a5099e084 +Revises: 0630b93bcfe0 +Create Date: 2023-04-20 10:46:00.334251 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "df5a5099e084" +down_revision = "95acee9f0452" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute( + """ + INSERT INTO + gn_permissions.t_objects (code_object, description_object) + VALUES + ( + 'ADDITIONAL_FIELDS', + 'Gestion du backoffice des champs additionnels' + ), ( + 'MOBILE_APPS', + 'Gestion des applications mobiles' + ), ( + 'MODULES', + 'Gestion des modules' + ) + """ + ) + op.execute( + """ + INSERT INTO + gn_permissions.cor_object_module (id_object, id_module) + SELECT + o.id_object, + (SELECT id_module FROM gn_commons.t_modules WHERE module_code = 'ADMIN') + FROM + ( + VALUES ('ADDITIONAL_FIELDS'), ('MOBILE_APPS'), ('MODULES') + ) AS v (code_object) + JOIN + gn_permissions.t_objects o ON o.code_object = v.code_object + """ + ) + + +def downgrade(): + op.execute( + """ + DELETE FROM gn_permissions.t_objects + WHERE code_object IN ('ADDITIONAL_FIELDS', 'MOBILE_APPS', 'MODULES') + """ + ) diff --git a/backend/geonature/migrations/versions/e2a94808cf76_add_notifications_object.py b/backend/geonature/migrations/versions/e2a94808cf76_add_notifications_object.py new file mode 100644 index 0000000000..ed42c1eea4 --- /dev/null +++ b/backend/geonature/migrations/versions/e2a94808cf76_add_notifications_object.py @@ -0,0 +1,43 @@ +"""add notifications object + +Revision ID: e2a94808cf76 +Revises: cf1c1fdbde77 +Create Date: 2023-04-14 18:16:57.981499 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "e2a94808cf76" +down_revision = "cf1c1fdbde77" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute( + """ + INSERT INTO + gn_permissions.t_objects (code_object, description_object) + VALUES ( + 'NOTIFICATIONS', + 'Gestion du backoffice des notifications' + ) + """ + ) + op.execute( + """ + INSERT INTO + gn_permissions.cor_object_module (id_object, id_module) + VALUES ( + (SELECT id_object FROM gn_permissions.t_objects WHERE code_object = 'NOTIFICATIONS'), + (SELECT id_module FROM gn_commons.t_modules WHERE module_code = 'ADMIN') + ) + """ + ) + + +def downgrade(): + op.execute("DELETE FROM gn_permissions.t_objects WHERE code_object = 'NOTIFICATIONS'") diff --git a/backend/geonature/migrations/versions/f051b88a57fd_permissions_available.py b/backend/geonature/migrations/versions/f051b88a57fd_permissions_available.py new file mode 100644 index 0000000000..01a0255c21 --- /dev/null +++ b/backend/geonature/migrations/versions/f051b88a57fd_permissions_available.py @@ -0,0 +1,147 @@ +"""permissions available + +Revision ID: f051b88a57fd +Revises: 7fe46b0e4729 +Create Date: 2023-04-14 17:19:36.490766 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.types import Integer, Boolean, Unicode + + +# revision identifiers, used by Alembic. 
+revision = "f051b88a57fd" +down_revision = "7fe46b0e4729" +branch_labels = None +depends_on = None + + +def upgrade(): + op.create_table( + "t_permissions_available", + sa.Column( + "id_module", + Integer, + sa.ForeignKey("gn_commons.t_modules.id_module"), + primary_key=True, + ), + sa.Column( + "id_object", + Integer, + sa.ForeignKey("gn_permissions.t_objects.id_object"), + primary_key=True, + ), + sa.Column( + "id_action", + Integer, + sa.ForeignKey("gn_permissions.bib_actions.id_action"), + primary_key=True, + ), + sa.Column( + "label", + Unicode, + ), + sa.Column( + "scope_filter", + Boolean, + server_default=sa.false(), + ), + schema="gn_permissions", + ) + op.execute( + """ + INSERT INTO + gn_permissions.t_permissions_available ( + id_module, + id_object, + id_action, + label, + scope_filter + ) + SELECT + m.id_module, + o.id_object, + a.id_action, + v.label, + v.scope_filter + FROM + ( + VALUES + ('ADMIN', 'PERMISSIONS', 'C', False, 'Créer des permissions') + ,('ADMIN', 'PERMISSIONS', 'R', False, 'Voir les permissions') + ,('ADMIN', 'PERMISSIONS', 'U', False, 'Modifier les permissions') + ,('ADMIN', 'PERMISSIONS', 'E', False, 'Exporter les permissions') + ,('ADMIN', 'PERMISSIONS', 'D', False, 'Supprimer des permissions') + ,('ADMIN', 'NOMENCLATURES', 'C', False, 'Créer des nomenclatures') + ,('ADMIN', 'NOMENCLATURES', 'R', False, 'Voir les nomenclatures') + ,('ADMIN', 'NOMENCLATURES', 'U', False, 'Modifier les nomenclatures') + ,('ADMIN', 'NOMENCLATURES', 'E', False, 'Exporter les nomenclatures') + ,('ADMIN', 'NOMENCLATURES', 'D', False, 'Supprimer des nomenclatures') + ,('ADMIN', 'NOTIFICATIONS', 'C', False, 'Créer des entrées dans l’administration des notifications') + ,('ADMIN', 'NOTIFICATIONS', 'R', False, 'Voir les entrées dans l’administration des notifications') + ,('ADMIN', 'NOTIFICATIONS', 'U', False, 'Modifier des entrées dans l’administration des notifications') + ,('ADMIN', 'NOTIFICATIONS', 'E', False, 'Exporter les entrées dans l’administration des notifications') + ,('ADMIN', 'NOTIFICATIONS', 'D', False, 'Supprimer des entrées dans l’administration des notifications') + ,('ADMIN', 'ADDITIONAL_FIELDS', 'C', False, 'Créer des champs additionnels') + ,('ADMIN', 'ADDITIONAL_FIELDS', 'R', False, 'Voir les champs additionnels') + ,('ADMIN', 'ADDITIONAL_FIELDS', 'U', False, 'Modifier les champs additionnels') + ,('ADMIN', 'ADDITIONAL_FIELDS', 'E', False, 'Exporter les champs additionnels') + ,('ADMIN', 'ADDITIONAL_FIELDS', 'D', False, 'Supprimer des champs additionnels') + ,('ADMIN', 'MODULES', 'R', False, 'Voir les modules') + ,('ADMIN', 'MODULES', 'U', False, 'Modifier les modules') + ,('ADMIN', 'MODULES', 'E', False, 'Exporter les modules') + ,('ADMIN', 'MOBILE_APPS', 'C', False, 'Déclarer des applications mobiles') + ,('ADMIN', 'MOBILE_APPS', 'R', False, 'Voir les applications mobiles') + ,('ADMIN', 'MOBILE_APPS', 'U', False, 'Modifier les applications mobiles') + ,('ADMIN', 'MOBILE_APPS', 'E', False, 'Exporter les applications mobiles') + ,('ADMIN', 'MOBILE_APPS', 'D', False, 'Supprimer des applications mobiles') + ,('METADATA', 'ALL', 'C', False, 'Créer des métadonnées') + ,('METADATA', 'ALL', 'R', True, 'Voir les métadonnées') + ,('METADATA', 'ALL', 'U', True, 'Modifier les métadonnées') + ,('METADATA', 'ALL', 'E', True, 'Exporter les métadonnées') + ,('METADATA', 'ALL', 'D', True, 'Supprimer des métadonnées') + ,('SYNTHESE', 'ALL', 'R', True, 'Voir les observations') + ,('SYNTHESE', 'ALL', 'E', True, 'Exporter les observations') + ) AS v (module_code, object_code, 
action_code, scope_filter, label) + JOIN + gn_commons.t_modules m ON m.module_code = v.module_code + JOIN + gn_permissions.t_objects o ON o.code_object = v.object_code + JOIN + gn_permissions.bib_actions a ON a.code_action = v.action_code + """ + ) + op.execute( + """ + WITH bad_permissions AS ( + SELECT + p.id_permission + FROM + gn_permissions.t_permissions p + JOIN gn_commons.t_modules m + USING (id_module) + WHERE + m.module_code IN ('GEONATURE', 'ADMIN', 'METADATA', 'SYNTHESE') + EXCEPT + SELECT + p.id_permission + FROM + gn_permissions.t_permissions p + JOIN gn_permissions.t_permissions_available pa ON + (p.id_module = pa.id_module + AND p.id_object = pa.id_object + AND p.id_action = pa.id_action) + ) + DELETE + FROM + gn_permissions.t_permissions p + USING bad_permissions bp + WHERE + bp.id_permission = p.id_permission; + """ + ) + + +def downgrade(): + op.drop_table(schema="gn_permissions", table_name="t_permissions_available") diff --git a/backend/geonature/migrations/versions/f1dd984bff97_add_sensitivity_filter.py b/backend/geonature/migrations/versions/f1dd984bff97_add_sensitivity_filter.py new file mode 100644 index 0000000000..34842153c4 --- /dev/null +++ b/backend/geonature/migrations/versions/f1dd984bff97_add_sensitivity_filter.py @@ -0,0 +1,69 @@ +"""add sensitivity filter + +Revision ID: f1dd984bff97 +Revises: f051b88a57fd +Create Date: 2023-04-19 16:24:57.945428 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy import Column, Boolean + + +# revision identifiers, used by Alembic. +revision = "f1dd984bff97" +down_revision = "f051b88a57fd" +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column( + schema="gn_permissions", + table_name="t_permissions", + column=Column( + "sensitivity_filter", + Boolean, + server_default=sa.false(), + ), + ) + op.add_column( + schema="gn_permissions", + table_name="t_permissions_available", + column=Column( + "sensitivity_filter", + Boolean, + server_default=sa.false(), + ), + ) + op.execute( + """ + UPDATE + gn_permissions.t_permissions_available pa + SET + sensitivity_filter = True + FROM + gn_commons.t_modules m, + gn_permissions.t_objects o, + gn_permissions.bib_actions a + WHERE + pa.id_module = m.id_module + AND + pa.id_object = o.id_object + AND + pa.id_action = a.id_action + AND + m.module_code = 'SYNTHESE' AND o.code_object = 'ALL' and a.code_action = 'R' + """ + ) + + +def downgrade(): + op.drop_column( + schema="gn_permissions", table_name="t_permissions", column_name="sensitivity_filter" + ) + op.drop_column( + schema="gn_permissions", + table_name="t_permissions_available", + column_name="sensitivity_filter", + ) diff --git a/backend/geonature/templates/acquisition_framework_template_pdf.html b/backend/geonature/templates/acquisition_framework_template_pdf.html index cf62eb800c..e716288462 100644 --- a/backend/geonature/templates/acquisition_framework_template_pdf.html +++ b/backend/geonature/templates/acquisition_framework_template_pdf.html @@ -12,7 +12,7 @@ @@ -26,7 +26,7 @@ > @@ -267,7 +267,7 @@
[hunk context partially lost to extraction: chart heading "Répartition des espèces"; the empty-state message follows]
    Pas d'espèces à afficher. @@ -315,4 +315,4 @@ - \ No newline at end of file + diff --git a/backend/geonature/templates/admin/hide_select2_options_create.html b/backend/geonature/templates/admin/hide_select2_options_create.html new file mode 100644 index 0000000000..1e4b5cefe4 --- /dev/null +++ b/backend/geonature/templates/admin/hide_select2_options_create.html @@ -0,0 +1,6 @@ +{% extends 'admin/model/create.html' %} + +{% block head_css %} + {{ super() }} + +{% endblock %} \ No newline at end of file diff --git a/backend/geonature/templates/admin/hide_select2_options_edit.html b/backend/geonature/templates/admin/hide_select2_options_edit.html new file mode 100644 index 0000000000..081635f402 --- /dev/null +++ b/backend/geonature/templates/admin/hide_select2_options_edit.html @@ -0,0 +1,6 @@ +{% extends 'admin/model/edit.html' %} + +{% block head_css %} + {{ super() }} + +{% endblock %} \ No newline at end of file diff --git a/backend/geonature/templates/admin/index_layout.html b/backend/geonature/templates/admin/index_layout.html index 95ae680d55..adeac99710 100644 --- a/backend/geonature/templates/admin/index_layout.html +++ b/backend/geonature/templates/admin/index_layout.html @@ -10,36 +10,38 @@ {% set children = item.get_children() %} {%- if children %} + class="list-group-item list-group-item-action main-item"> {{ layout.menu_icon(item) }}{{ item.name }} - {%- for child in children -%} - {%- if child.is_category() -%} - {{ menu(menu_root=[child]) }} - {% else %} - - {{ layout.menu_icon(child) }}{{ child.name }} - + {%- for child in children -%} + {%- if child.is_category() -%} + {{ menu(menu_root=[child]) }} + {% else %} + + + {{ layout.menu_icon(child) }}{{ child.name }} + + + {%- endif %} + {%- endfor %} - {%- endif %} - {%- endfor %} {% endif %} {%- else %} - {%- if item.is_accessible() and item.is_visible() -%} + {%- if item.is_accessible() and item.is_visible() and item.name != 'Accueil' -%} + - {{ layout.menu_icon(item) }}{{ item.name }} + {{ layout.menu_icon(item) }}{{ item.name }} {%- endif -%} {% endif -%} {% endfor %} + {% endmacro %} diff --git a/backend/geonature/templates/dataset_template_pdf.html b/backend/geonature/templates/dataset_template_pdf.html index 96f9aad909..03bef2e0d4 100644 --- a/backend/geonature/templates/dataset_template_pdf.html +++ b/backend/geonature/templates/dataset_template_pdf.html @@ -12,7 +12,7 @@ @@ -25,7 +25,7 @@ > @@ -205,18 +205,16 @@
{{data.title}} -->
- {% if data['chart']: %}
[chart block markup lost to extraction: heading "Répartition des espèces", empty-state message "Pas d'espèces à afficher."]
- {% endif %}
[section heading: "Cadre d'acquisition"]
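Note on the two PDF templates above: each gates its species chart on a data['chart'] flag and renders {{data.title}} from the same context dict. A minimal sketch of how such a context could be fed to the template; the view function name and the way the flag is computed are assumptions for illustration, not taken from this diff:

    # Hedged sketch: building the context consumed by dataset_template_pdf.html.
    from flask import render_template

    def render_dataset_pdf_html(dataset, species_counts):
        data = {
            "title": dataset.dataset_name,  # rendered as {{ data.title }}
            "chart": bool(species_counts),  # guards the "Répartition des espèces" block
        }
        return render_template("dataset_template_pdf.html", data=data)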
    diff --git a/backend/geonature/templates/layout.html b/backend/geonature/templates/layout.html index 74d895521f..c16398ac43 100644 --- a/backend/geonature/templates/layout.html +++ b/backend/geonature/templates/layout.html @@ -3,71 +3,92 @@ {% extends 'admin/base.html' %} -{% block head_tail %} +{% block head_css %} {{ super() }} - - {% endblock %} -{% block page_body %} +{% block head_tail %} + +{% endblock %} -
[page-body markup largely lost to extraction; recoverable structure — old body: {% block brand %} rendering {{ admin_view.name|capitalize }}, then {{ layout.messages() }}; new body: a sidebar wrapper calling {{index_layout.menu()}} plus a content pane rendering {{ admin_view.name|capitalize }}, {{ layout.messages() }} and {% block body %}]
    {% endblock %} + + + diff --git a/backend/geonature/tests/fixtures.py b/backend/geonature/tests/fixtures.py index 660c125e43..411d934f75 100644 --- a/backend/geonature/tests/fixtures.py +++ b/backend/geonature/tests/fixtures.py @@ -1,11 +1,10 @@ import json -import pkg_resources import datetime import tempfile from PIL import Image import pytest -from flask import testing, url_for +from flask import testing, url_for, current_app from werkzeug.datastructures import Headers from sqlalchemy import func from shapely.geometry import Point @@ -14,10 +13,10 @@ from geonature import create_app from geonature.utils.env import db from geonature.core.gn_permissions.models import ( - TActions, - TFilters, - BibFiltersType, - CorRoleActionFilterModuleObject, + PermFilterType, + PermAction, + PermObject, + Permission, ) from geonature.core.gn_commons.models import TModules, TMedias, BibTablesLocation from geonature.core.gn_meta.models import ( @@ -27,6 +26,7 @@ CorAcquisitionFrameworkActor, ) from geonature.core.gn_synthese.models import TSources, Synthese, TReport, BibReportsTypes +from geonature.core.sensitivity.models import SensitivityRule, cor_sensitivity_area from pypnusershub.db.models import ( User, @@ -36,20 +36,32 @@ UserApplicationRight, ) from pypnnomenclature.models import TNomenclatures, BibNomenclaturesTypes -from apptax.taxonomie.models import Taxref +from apptax.taxonomie.models import ( + Taxref, + TaxrefBdcStatutTaxon, + TaxrefBdcStatutCorTextValues, + bdc_statut_cor_text_area, + TaxrefBdcStatutText, +) +from ref_geo.models import LAreas + from utils_flask_sqla.tests.utils import JSONClient +import sqlalchemy as sa + __all__ = [ "datasets", "acquisition_frameworks", "synthese_data", + "synthese_sensitive_data", "source", "reports_data", - "filters", "medium", "module", - "isolate_synthese", + "perm_object", + "notifications_enabled", + "celery_eager", ] @@ -75,7 +87,8 @@ def app(): @pytest.fixture(scope="function") -def module(): +def module(users): + other_module = TModules.query.filter_by(module_code="GEONATURE").one() with db.session.begin_nested(): new_module = TModules( module_code="MODULE_1", @@ -85,26 +98,38 @@ def module(): active_backend=False, ) db.session.add(new_module) + # Copy perission from another module + with db.session.begin_nested(): + for perm in Permission.query.filter_by(id_module=other_module.id_module): + new_perm = Permission( + id_role=perm.id_role, + id_action=perm.id_action, + id_module=new_module.id_module, + id_object=perm.id_object, + scope_value=perm.scope_value, + ) + db.session.add(new_perm) return new_module +@pytest.fixture(scope="function") +def perm_object(): + with db.session.begin_nested(): + new_object = PermObject(code_object="TEST_OBJECT") + db.session.add(new_object) + return new_object + + @pytest.fixture(scope="session") def users(app): app = Application.query.filter(Application.code_application == "GN").one() profil = Profil.query.filter(Profil.nom_profil == "Lecteur").one() - modules_codes = ["GEONATURE", "SYNTHESE", "IMPORT", "OCCTAX", "METADATA"] - modules = TModules.query.filter(TModules.module_code.in_(modules_codes)).all() + modules = TModules.query.all() - actions = { - code: TActions.query.filter(TActions.code_action == code).one() for code in "CRUVED" - } - scope_filters = { - scope: TFilters.query.filter(TFilters.value_filter == str(scope)).one() - for scope in [0, 1, 2, 3] - } + actions = {code: PermAction.query.filter_by(code_action=code).one() for code in "CRUVED"} - def create_user(username, organisme=None, scope=None): 
+ def create_user(username, organisme=None, scope=None, sensitivity_filter=False): # do not commit directly on current transaction, as we want to rollback all changes at the end of tests with db.session.begin_nested(): user = User( @@ -122,13 +147,20 @@ def create_user(username, organisme=None, scope=None): id_role=user.id_role, id_application=app.id_application, id_profil=profil.id_profil ) db.session.add(right) - if scope: + if scope > 0: + object_all = PermObject.query.filter_by(code_object="ALL").one() for action in actions.values(): for module in modules: - permission = CorRoleActionFilterModuleObject( - role=user, action=action, filter=scope, module=module - ) - db.session.add(permission) + for obj in [object_all] + module.objects: + permission = Permission( + role=user, + action=action, + module=module, + object=obj, + scope_value=scope if scope != 3 else None, + sensitivity_filter=sensitivity_filter, + ) + db.session.add(permission) return user users = {} @@ -137,12 +169,13 @@ def create_user(username, organisme=None, scope=None): db.session.add(organisme) users_to_create = [ - ("noright_user", organisme, scope_filters[0]), - ("stranger_user",), - ("associate_user", organisme, scope_filters[2]), - ("self_user", organisme, scope_filters[1]), - ("user", organisme, scope_filters[2]), - ("admin_user", organisme, scope_filters[3]), + ("noright_user", organisme, 0), + ("stranger_user", None, 2), + ("associate_user", organisme, 2), + ("self_user", organisme, 1), + ("user", organisme, 2), + ("admin_user", organisme, 3), + ("associate_user_2_exclude_sensitive", organisme, 2, True), ] for username, *args in users_to_create: @@ -173,11 +206,11 @@ def acquisition_frameworks(users): TNomenclatures.mnemonique == "Contact principal", ).one() - def create_af(creator=None): + def create_af(name, creator): with db.session.begin_nested(): af = TAcquisitionFramework( - acquisition_framework_name="test", - acquisition_framework_desc="test", + acquisition_framework_name=name, + acquisition_framework_desc=name, creator=creator, ) db.session.add(af) @@ -189,12 +222,16 @@ def create_af(creator=None): return af afs = { - "own_af": create_af(creator=users["user"]), - "associate_af": create_af(creator=users["associate_user"]), - "stranger_af": create_af(creator=users["stranger_user"]), - "orphan_af": create_af(), - "af_1": create_af(), - "af_2": create_af(), + name: create_af(name=name, creator=creator) + for name, creator in [ + ("own_af", users["user"]), + ("associate_af", users["associate_user"]), + ("stranger_af", users["stranger_user"]), + ("orphan_af", None), + ("af_1", None), + ("af_2", None), + ("af_3", None), + ] } return afs @@ -233,6 +270,7 @@ def create_dataset(name, id_af, digitizer=None, modules=writable_module): af = acquisition_frameworks["orphan_af"] af_1 = acquisition_frameworks["af_1"] af_2 = acquisition_frameworks["af_2"] + af_3 = acquisition_frameworks["af_3"] datasets = { name: create_dataset(name, id_af, digitizer) @@ -241,8 +279,14 @@ def create_dataset(name, id_af, digitizer=None, modules=writable_module): ("associate_dataset", af.id_acquisition_framework, users["associate_user"]), ("stranger_dataset", af.id_acquisition_framework, users["stranger_user"]), ("orphan_dataset", af.id_acquisition_framework, None), - ("belong_af_1", af_1.id_acquisition_framework, None), - ("belong_af_2", af_2.id_acquisition_framework, None), + ("belong_af_1", af_1.id_acquisition_framework, users["stranger_user"]), + ("belong_af_2", af_2.id_acquisition_framework, users["stranger_user"]), + ("belong_af_3", 
af_3.id_acquisition_framework, users["stranger_user"]), + ( + "associate_2_dataset_sensitive", + af.id_acquisition_framework, + users["associate_user_2_exclude_sensitive"], + ), ] } datasets["with_module_1"] = create_dataset( @@ -261,8 +305,9 @@ def source(): return source -def create_synthese(geom, taxon, user, dataset, source, uuid): +def create_synthese(geom, taxon, user, dataset, source, uuid, cor_observers, **kwargs): now = datetime.datetime.now() + return Synthese( id_source=source.id_source, unique_id_sinp=uuid, @@ -276,76 +321,221 @@ def create_synthese(geom, taxon, user, dataset, source, uuid): the_geom_local=func.st_transform(geom, 2154), date_min=now, date_max=now, + cor_observers=cor_observers, + **kwargs, ) @pytest.fixture() def synthese_data(app, users, datasets, source): - map_center_point = Point( - app.config["MAPCONFIG"]["CENTER"][1], - app.config["MAPCONFIG"]["CENTER"][0], - ) - geom_4326 = from_shape(map_center_point, srid=4326) - data = [] + point1 = Point(5.92, 45.56) + point2 = Point(-1.54, 46.85) + point3 = Point(-3.486786, 48.832182) + data = {} with db.session.begin_nested(): - for cd_nom in [713776, 2497]: - taxon = Taxref.query.filter_by(cd_nom=cd_nom).one() + for name, cd_nom, point, ds, comment_description in [ + ("obs1", 713776, point1, datasets["own_dataset"], "obs1"), + ("obs2", 212, point2, datasets["own_dataset"], "obs2"), + ("obs3", 2497, point3, datasets["own_dataset"], "obs3"), + ("p1_af1", 713776, point1, datasets["belong_af_1"], "p1_af1"), + ("p1_af1_2", 212, point1, datasets["belong_af_1"], "p1_af1_2"), + ("p1_af2", 212, point1, datasets["belong_af_2"], "p1_af2"), + ("p2_af2", 2497, point2, datasets["belong_af_2"], "p2_af2"), + ("p2_af1", 2497, point2, datasets["belong_af_1"], "p2_af1"), + ("p3_af3", 2497, point3, datasets["belong_af_3"], "p3_af3"), + ]: unique_id_sinp = ( "f4428222-d038-40bc-bc5c-6e977bbbc92b" if not data else func.uuid_generate_v4() ) + geom = from_shape(point, srid=4326) + taxon = Taxref.query.filter_by(cd_nom=cd_nom).one() + kwargs = {} + kwargs["comment_description"] = comment_description s = create_synthese( - geom_4326, + geom, taxon, users["self_user"], - datasets["own_dataset"], + ds, source, unique_id_sinp, + [users["admin_user"], users["user"]], + **kwargs, ) db.session.add(s) - data.append(s) + data[name] = s return data @pytest.fixture() -def isolate_synthese(users, datasets, source): - map_center_point = Point(-3.486786, 48.832182) - geom_4326 = from_shape(map_center_point, srid=4326) - taxon = Taxref.query.filter_by(cd_nom=79306).one() - with db.session.begin_nested(): - s = create_synthese( - geom_4326, - taxon, - users["self_user"], - datasets["belong_af_1"], - source, - func.uuid_generate_v4(), +def synthese_sensitive_data(app, users, datasets, source): + data = {} + + # Retrieve all the taxa with a protection status, and the corresponding areas + cte_taxa_area_with_status = ( + db.session.query(TaxrefBdcStatutTaxon.cd_nom, LAreas.id_area) + .select_from(TaxrefBdcStatutTaxon, LAreas) + .join( + TaxrefBdcStatutCorTextValues, + TaxrefBdcStatutTaxon.id_value_text == TaxrefBdcStatutCorTextValues.id_value_text, ) - db.session.add(s) - return s - + .join( + bdc_statut_cor_text_area, + LAreas.id_area == bdc_statut_cor_text_area.c.id_area, + ) + .join( + TaxrefBdcStatutText, bdc_statut_cor_text_area.c.id_text == TaxrefBdcStatutText.id_text + ) + .filter(bdc_statut_cor_text_area.c.id_text == TaxrefBdcStatutCorTextValues.id_text) + .filter(TaxrefBdcStatutText.enable == True) + ).cte("taxa_with_status") + + # Retrieve all the 
taxa with a sensitivity rule, and the corresponding areas + cte_taxa_area_with_sensitivity = ( + db.session.query(SensitivityRule.cd_nom, cor_sensitivity_area.c.id_area) + .select_from(SensitivityRule) + .join(cor_sensitivity_area, SensitivityRule.id == cor_sensitivity_area.c.id_sensitivity) + .filter(SensitivityRule.active == True) + ).cte("taxa_with_sensitivity") + + # Retrieve a cd_nom and point that fit both a sensitivity rule and a protection status + sensitive_protected_cd_nom, sensitive_protected_id_area = ( + db.session.query( + cte_taxa_area_with_status.c.cd_nom, + cte_taxa_area_with_status.c.id_area, + ) + .join( + cte_taxa_area_with_sensitivity, + sa.and_( + cte_taxa_area_with_status.c.cd_nom == cte_taxa_area_with_sensitivity.c.cd_nom, + cte_taxa_area_with_status.c.id_area == cte_taxa_area_with_sensitivity.c.id_area, + ), + ) + .first() + ) + sensitivity_rule = SensitivityRule.query.filter( + SensitivityRule.cd_nom == sensitive_protected_cd_nom, + SensitivityRule.areas.any(LAreas.id_area == sensitive_protected_id_area), + ).first() + sensitive_protected_area = LAreas.query.filter( + LAreas.id_area == sensitive_protected_id_area + ).first() + # Get one point inside the area : the centroid (assuming the area is convex) + sensitive_protected_point = db.session.query( + func.ST_Centroid(func.ST_Transform(sensitive_protected_area.geom, 4326)) + ).first()[0] + # Add a criteria to the sensitivity rule if needed + id_nomenclature_bio_status = None + id_type_nomenclature_bio_status = ( + BibNomenclaturesTypes.query.filter(BibNomenclaturesTypes.mnemonique == "STATUT_BIO") + .one() + .id_type + ) + id_nomenclature_behaviour = None + id_type_nomenclature_behaviour = ( + BibNomenclaturesTypes.query.filter(BibNomenclaturesTypes.mnemonique == "OCC_COMPORTEMENT") + .one() + .id_type + ) + # Get one criteria for the sensitivity rule if needed + list_criterias_for_sensitivity_rule = sensitivity_rule.criterias + if list_criterias_for_sensitivity_rule: + one_criteria_for_sensitive_rule = list_criterias_for_sensitivity_rule[0] + id_type_criteria_for_sensitive_rule = one_criteria_for_sensitive_rule.id_type + if id_type_criteria_for_sensitive_rule == id_type_nomenclature_bio_status: + id_nomenclature_bio_status = one_criteria_for_sensitive_rule.id_nomenclature + elif id_type_criteria_for_sensitive_rule == id_type_nomenclature_behaviour: + id_nomenclature_behaviour = one_criteria_for_sensitive_rule.id_nomenclature + + # Retrieve a cd_nom and point that fit a protection status but no sensitivity rule + protected_not_sensitive_cd_nom, protected_not_sensitive_id_area = ( + db.session.query(cte_taxa_area_with_status.c.cd_nom, cte_taxa_area_with_status.c.id_area) + .filter( + cte_taxa_area_with_status.c.cd_nom.notin_([cte_taxa_area_with_sensitivity.c.cd_nom]) + ) + .first() + ) + protected_not_sensitive_area = LAreas.query.filter( + LAreas.id_area == protected_not_sensitive_id_area + ).first() + # Get one point inside the area : the centroid (assuming the area is convex) + protected_not_sensitive_point = db.session.query( + func.ST_Centroid(func.ST_Transform(protected_not_sensitive_area.geom, 4326)) + ).first()[0] -@pytest.fixture(scope="function") -def filters(): - """ - Creates one filter per filter type - """ - # Gather all types - avail_filter_types = BibFiltersType.query.order_by(BibFiltersType.id_filter_type).all() - # Init returned filter_dict - filters_dict = {} - # For each type, generate a Filter with db.session.begin_nested(): - for i, filter_type in enumerate(avail_filter_types): - new_filter = 
TFilters( - label_filter=f"test_{i}", - value_filter=f"value_{i}", - description_filter="Filtre test", - id_filter_type=filter_type.id_filter_type, + for name, cd_nom, point, ds, comment_description in [ + ( + "obs_sensitive_protected", + sensitive_protected_cd_nom, + sensitive_protected_point, + datasets["own_dataset"], + "obs_sensitive_protected", + ), + ( + "obs_protected_not_sensitive", + protected_not_sensitive_cd_nom, + protected_not_sensitive_point, + datasets["own_dataset"], + "obs_protected_not_sensitive", + ), + ( + "obs_sensitive_protected_2", + sensitive_protected_cd_nom, + sensitive_protected_point, + datasets["associate_2_dataset_sensitive"], + "obs_sensitive_protected_2", + ), + ]: + unique_id_sinp = func.uuid_generate_v4() + geom = point + taxon = Taxref.query.filter_by(cd_nom=cd_nom).one() + kwargs = {} + if id_nomenclature_bio_status: + kwargs["id_nomenclature_bio_status"] = id_nomenclature_bio_status + elif id_nomenclature_behaviour: + kwargs["id_nomenclature_behaviour"] = id_nomenclature_behaviour + kwargs["comment_description"] = comment_description + s = create_synthese( + geom, taxon, users["self_user"], ds, source, unique_id_sinp, [], **kwargs ) - filters_dict[filter_type.code_filter_type] = new_filter - db.session.add(new_filter) + db.session.add(s) + data[name] = s + + # Assert that obs_sensitive_protected is a sensitive observation + id_nomenclature_not_sensitive = ( + TNomenclatures.query.filter( + TNomenclatures.nomenclature_type.has(BibNomenclaturesTypes.mnemonique == "SENSIBILITE") + ) + .filter(TNomenclatures.cd_nomenclature == "4") + .one() + ).id_nomenclature + Synthese.query.filter( + Synthese.cd_nom == sensitive_protected_cd_nom + ).first().id_nomenclature_sensitivity != id_nomenclature_not_sensitive + + # Assert that obs_protected_not_sensitive is not a sensitive observation + Synthese.query.filter( + Synthese.cd_nom == protected_not_sensitive_cd_nom + ).first().id_nomenclature_sensitivity == id_nomenclature_not_sensitive + + ## Assert that obs_sensitive_protected and obs_protected_not_sensitive are protected observation + def assert_observation_is_protected(name_observation): + observation_synthese = data[name_observation] + list_areas_observation = [area.id_area for area in observation_synthese.areas] + cd_nom = observation_synthese.cd_nom + list_id_areas_with_status_for_cd_nom = [ + tuple_id_area[0] + for tuple_id_area in db.session.query(cte_taxa_area_with_status.c.id_area) + .filter(cte_taxa_area_with_status.c.cd_nom == cd_nom) + .all() + ] + # assert that intersection of the two lists is not empty + assert set(list_areas_observation).intersection(set(list_id_areas_with_status_for_cd_nom)) - return filters_dict + assert_observation_is_protected("obs_sensitive_protected") + assert_observation_is_protected("obs_protected_not_sensitive") + + return data def create_media(media_path=""): @@ -374,8 +564,13 @@ def create_media(media_path=""): @pytest.fixture def medium(app): + # FIXME: find a better way to get the id_media that will be created + new_id_media = (db.session.query(func.max(TMedias.id_media)).scalar() or 0) + 1 image = Image.new("RGBA", size=(1, 1), color=(155, 0, 0)) - with tempfile.NamedTemporaryFile() as f: + # Delete = false since it will be done automatically + with tempfile.NamedTemporaryFile( + dir=TMedias.base_dir(), prefix=f"{new_id_media}_", suffix=".png", delete=False + ) as f: image.save(f, "png") yield create_media(media_path=str(f.name)) @@ -383,6 +578,7 @@ def medium(app): @pytest.fixture() def reports_data(users, 
synthese_data): data = [] + # do not commit directly on current transaction, as we want to rollback all changes at the end of tests def create_report(id_synthese, id_role, content, id_type, deleted): new_report = TReport( @@ -396,9 +592,7 @@ def create_report(id_synthese, id_role, content, id_type, deleted): db.session.add(new_report) return new_report - ids = [] - for el in synthese_data: - ids.append(el.id_synthese) + ids = [s.id_synthese for s in synthese_data.values()] # get id by type discussionId = ( BibReportsTypes.query.filter(BibReportsTypes.type == "discussion").first().id_type @@ -413,3 +607,8 @@ def create_report(id_synthese, id_role, content, id_type, deleted): data.append(create_report(id_synthese, *args)) return data + + +@pytest.fixture() +def notifications_enabled(monkeypatch): + monkeypatch.setitem(current_app.config, "NOTIFICATIONS_ENABLED", True) diff --git a/backend/geonature/tests/test_gn_commons.py b/backend/geonature/tests/test_gn_commons.py index 8cc5b8b7d2..3eda70a129 100644 --- a/backend/geonature/tests/test_gn_commons.py +++ b/backend/geonature/tests/test_gn_commons.py @@ -1,6 +1,8 @@ +from pathlib import Path import tempfile import pytest +import json from flask import url_for from geoalchemy2.elements import WKTElement from PIL import Image @@ -8,10 +10,12 @@ from sqlalchemy import func from werkzeug.exceptions import Conflict, Forbidden, NotFound, Unauthorized +from geonature.core.gn_commons.admin import BibFieldAdmin from geonature.core.gn_commons.models import TAdditionalFields, TMedias, TPlaces, BibTablesLocation -from geonature.core.gn_commons.models.base import TModules, TParameters +from geonature.core.gn_commons.models.base import TModules, TParameters, BibWidgets from geonature.core.gn_commons.repositories import TMediaRepository -from geonature.core.gn_permissions.models import TObjects +from geonature.core.gn_commons.tasks import clean_attachments +from geonature.core.gn_permissions.models import PermObject from geonature.utils.env import db from geonature.utils.errors import GeoNatureError @@ -30,7 +34,7 @@ def place(users): @pytest.fixture(scope="function") def additional_field(app, datasets): module = TModules.query.filter(TModules.module_code == "SYNTHESE").one() - obj = TObjects.query.filter(TObjects.code_object == "ALL").one() + obj = PermObject.query.filter(PermObject.code_object == "ALL").one() datasets = list(datasets.values()) additional_field = TAdditionalFields( field_name="test", @@ -95,7 +99,7 @@ def test_get_media(self, medium): assert resp_json["title_fr"] == medium.title_fr assert resp_json["unique_id_media"] == str(medium.unique_id_media) - def test_delete_media(self, medium): + def test_delete_media(self, app, medium): id_media = int(medium.id_media) response = self.client.delete(url_for("gn_commons.delete_media", id_media=id_media)) @@ -103,6 +107,10 @@ def test_delete_media(self, medium): assert response.status_code == 200 assert response.json["resp"] == f"media {id_media} deleted" + # Re-move file in other side to does not break TemporaryFile context manager + media_path = medium.base_dir() / medium.media_path + media_path.rename(media_path.parent / media_path.name[len("deleted_") :]) + def test_create_media(self, medium): title_fr = "test_test" image = Image.new("RGBA", size=(1, 1), color=(155, 0, 0)) @@ -163,7 +171,7 @@ def test_get_media_thumb_not_found(self, nonexistent_media): ) assert response.status_code == 404 - assert response.json["msg"] == "Media introuvable" + assert response.json["description"] == "Media 
introuvable" @pytest.mark.usefixtures("client_class", "temporary_transaction") @@ -310,16 +318,16 @@ def test_header_content_type_wrong(self, medium, test_media_type, test_content_t @pytest.mark.usefixtures("client_class", "temporary_transaction") class TestCommons: def test_list_modules(self, users): - response = self.client.get(url_for("gn_commons.list_modules")) + response = self.client.get(url_for("gn_commons.list_modules", exclude="GEONATURE")) assert response.status_code == Unauthorized.code set_logged_user_cookie(self.client, users["noright_user"]) - response = self.client.get(url_for("gn_commons.list_modules")) + response = self.client.get(url_for("gn_commons.list_modules", exclude="GEONATURE")) assert response.status_code == 200 assert len(response.json) == 0 set_logged_user_cookie(self.client, users["admin_user"]) - response = self.client.get(url_for("gn_commons.list_modules")) + response = self.client.get(url_for("gn_commons.list_modules", exclude="GEONATURE")) assert response.status_code == 200 assert len(response.json) > 0 @@ -382,10 +390,6 @@ def test_add_place(self, users): response = self.client.post(url_for("gn_commons.add_place")) assert response.status_code == Unauthorized.code - set_logged_user_cookie(self.client, users["noright_user"]) - response = self.client.post(url_for("gn_commons.add_place")) - assert response.status_code == Forbidden.code - set_logged_user_cookie(self.client, users["user"]) response = self.client.post(url_for("gn_commons.add_place"), data=geofeature) assert response.status_code == 200 @@ -458,6 +462,46 @@ def test_get_additional_fields_not_exist_in_module(self): # TODO: Do better than that: assert len(data) == 0 + def test_additional_field_admin(self, app, users, module, perm_object): + set_logged_user_cookie(self.client, users["admin_user"]) + app.config["ADDITIONAL_FIELDS"]["IMPLEMENTED_MODULES"] = [module.module_code] + app.config["ADDITIONAL_FIELDS"]["IMPLEMENTED_OBJECTS"] = [perm_object.code_object] + form_values = { + "field_label": "pytest_valid", + "field_name": "pytest_valid", + "module": module.id_module, + "objects": [perm_object.id_object], + "type_widget": BibWidgets.query.filter_by(widget_name="select").one().id_widget, + "field_values": json.dumps([{"label": "un", "value": 1}]), + } + + req = self.client.post( + "/admin/tadditionalfields/new/?url=/admin/tadditionalfields/", + data=form_values, + content_type="multipart/form-data", + ) + assert req.status_code == 302 + assert db.session.query( + db.session.query(TAdditionalFields).filter_by(field_name="pytest_valid").exists() + ).scalar() + + form_values.update( + { + "field_label": "pytest_invvalid", + "field_name": "pytest_invvalid", + "field_values": json.dumps([{"not_label": "un", "not_value": 1}]), + } + ) + req = self.client.post( + "/admin/tadditionalfields/new/?url=/admin/tadditionalfields/", + data=form_values, + content_type="multipart/form-data", + ) + assert req.status_code != 302 + assert not db.session.query( + db.session.query(TAdditionalFields).filter_by(field_name="pytest_invvalid").exists() + ).scalar() + def test_get_t_mobile_apps(self): response = self.client.get(url_for("gn_commons.get_t_mobile_apps")) @@ -491,3 +535,23 @@ def test_api_get_id_table_location_not_found(self): assert response.status_code == 204 # No content assert response.json is None + + +@pytest.mark.usefixtures("temporary_transaction") +class TestTasks: + def test_clean_attachements(self, monkeypatch, celery_eager, medium): + # Monkey patch the __before_commit_delete not to remove file + # when 
deleting the medium, so the clean_attachments can work + def mock_delete_media(self): + return None + + monkeypatch.setattr(TMedias, "__before_commit_delete__", mock_delete_media) + + # Remove media to trigger the cleaning + db.session.delete(medium) + db.session.commit() + + clean_attachments() + + # File should be removed + assert not Path(medium.media_path).is_file() diff --git a/backend/geonature/tests/test_gn_meta.py b/backend/geonature/tests/test_gn_meta.py index 2fd6f11b05..a8fca40ad6 100644 --- a/backend/geonature/tests/test_gn_meta.py +++ b/backend/geonature/tests/test_gn_meta.py @@ -11,7 +11,7 @@ from geojson import Point from sqlalchemy import func from werkzeug.exceptions import BadRequest, Conflict, Forbidden, NotFound, Unauthorized -from werkzeug.datastructures import MultiDict +from werkzeug.datastructures import MultiDict, Headers from ref_geo.models import BibAreasTypes, LAreas from geonature.core.gn_commons.models import TModules @@ -21,11 +21,6 @@ TDatasets, ) from geonature.core.gn_meta.routes import get_af_from_id -from geonature.core.gn_permissions.models import ( - CorRoleActionFilterModuleObject, - TActions, - TFilters, -) from geonature.core.gn_synthese.models import Synthese from geonature.utils.env import db @@ -239,56 +234,58 @@ def test_update_acquisition_framework(self, users, acquisition_frameworks): assert response.status_code == 200 assert response.json.get("acquisition_framework_name") == new_name - def test_get_acquisition_frameworks(self, users): - response = self.client.get(url_for("gn_meta.get_acquisition_frameworks")) - assert response.status_code == Unauthorized.code + def test_update_acquisition_framework_forbidden(self, users, acquisition_frameworks): + stranger_user = users["stranger_user"] + set_logged_user_cookie(self.client, stranger_user) + af = acquisition_frameworks["own_af"] - set_logged_user_cookie(self.client, users["admin_user"]) + response = self.client.post( + url_for( + "gn_meta.updateAcquisitionFramework", + id_acquisition_framework=af.id_acquisition_framework, + ), + data=dict(acquisition_framework_name="new_name"), + ) - response = self.client.get(url_for("gn_meta.get_acquisition_frameworks")) - response = self.client.get( - url_for("gn_meta.get_acquisition_frameworks"), - query_string={ - "datasets": "1", - "creator": "1", - "actors": "1", - }, + assert response.status_code == Forbidden.code + assert ( + response.json["description"] + == f"User {stranger_user.identifiant} cannot update acquisition framework {af.id_acquisition_framework}" ) - assert response.status_code == 200 - def test_get_post_acquisition_frameworks(self, users, commune_without_obs): - # SIMPLE TEST WITH POST REQUEST + def test_update_acquisition_framework_forbidden_af(self, users, acquisition_frameworks): + self_user = users["self_user"] + set_logged_user_cookie(self.client, self_user) + af = acquisition_frameworks["own_af"] + response = self.client.post( - url_for("gn_meta.get_acquisition_frameworks"), - json={}, + url_for( + "gn_meta.updateAcquisitionFramework", + id_acquisition_framework=af.id_acquisition_framework, + ), + data=dict(acquisition_framework_name="new_name"), + ) + + assert response.status_code == Forbidden.code + assert ( + response.json["description"] + == f"User {self_user.identifiant} cannot update acquisition framework {af.id_acquisition_framework}" ) + + def test_get_acquisition_frameworks(self, users): + response = self.client.get(url_for("gn_meta.get_acquisition_frameworks")) assert response.status_code == Unauthorized.code 
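The test_clean_attachements task test above hinges on pytest's monkeypatch fixture: it swaps the model's commit-time deletion hook for a no-op so that only the Celery task, not the ORM event, removes the file. A minimal standalone sketch of the same technique, run under pytest, with a hypothetical Widget model standing in for TMedias:

class Widget:
    # hypothetical model with a commit-time cleanup hook, mirroring
    # TMedias.__before_commit_delete__ in the diff above
    def __before_commit_delete__(self):
        raise RuntimeError("would delete the attached file")

def test_delete_without_side_effect(monkeypatch):
    # replace the hook with a no-op for this test only; monkeypatch
    # restores the original method automatically at teardown
    monkeypatch.setattr(Widget, "__before_commit_delete__", lambda self: None)
    Widget().__before_commit_delete__()  # no longer raises

monkeypatch.setattr patches the attribute on the class itself, so every instance touched during the test sees the no-op, and the original method comes back at teardown even if the test fails.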
set_logged_user_cookie(self.client, users["admin_user"]) - # POST EMPTY REQUEST FAIL WITHOUT ANY PARAMS - response = self.client.post(url_for("gn_meta.get_acquisition_frameworks")) - assert response.status_code == BadRequest.code - # POST REQUEST WITHOUT JSON AND WITHOUT QUERY STRING - response = self.client.post( - url_for("gn_meta.get_acquisition_frameworks"), - json={}, - ) - assert response.status_code == 200 - # POST REQUEST WITHOUT JSON - response = self.client.post( + + response = self.client.get(url_for("gn_meta.get_acquisition_frameworks")) + response = self.client.get( url_for("gn_meta.get_acquisition_frameworks"), query_string={ "datasets": "1", "creator": "1", "actors": "1", }, - json={}, - ) - assert response.status_code == 200 - # TEST RESPONSE WITH ONE FILTER AREA - response = self.client.post( - url_for("gn_meta.get_acquisition_frameworks"), - json={"areas": [[commune_without_obs.id_type, commune_without_obs.id_area]]}, ) assert response.status_code == 200 @@ -301,14 +298,12 @@ def test_get_acquisition_frameworks_list(self, users): response = self.client.get(url_for("gn_meta.get_acquisition_frameworks_list")) assert response.status_code == 200 - def test_filter_acquisition_by_geo( - self, synthese_data, users, isolate_synthese, commune_without_obs - ): + def test_filter_acquisition_by_geo(self, synthese_data, users, commune_without_obs): # security test already passed in previous tests set_logged_user_cookie(self.client, users["admin_user"]) # get 2 synthese observations in two differents AF and two differents communes - s1, s2 = synthese_data[0], isolate_synthese + s1, s2 = synthese_data["p1_af1"], synthese_data["p3_af3"] comm1, comm2 = getCommBySynthese(s1), getCommBySynthese(s2) # prerequisite for the test: @@ -320,7 +315,7 @@ def test_filter_acquisition_by_geo( # search metadata in first commune response = self.client.post( url_for("gn_meta.get_acquisition_frameworks"), - json={"areas": [[comm1.id_type, comm1.id_area]]}, + json={"areas": [comm1.id_area]}, ) ids = [af["id_acquisition_framework"] for af in response.json] assert s1.dataset.id_acquisition_framework in ids @@ -330,7 +325,7 @@ def test_filter_acquisition_by_geo( # get commune for this id synthese response = self.client.post( url_for("gn_meta.get_acquisition_frameworks"), - json={"areas": [[comm2.id_type, comm2.id_area]]}, + json={"areas": [comm2.id_area]}, ) ids = [af["id_acquisition_framework"] for af in response.json] assert s1.dataset.id_acquisition_framework not in ids @@ -339,7 +334,7 @@ def test_filter_acquisition_by_geo( # test no response if a commune have observations response = self.client.post( url_for("gn_meta.get_acquisition_frameworks"), - json={"areas": [[commune_without_obs.id_type, commune_without_obs.id_area]]}, + json={"areas": [commune_without_obs.id_area]}, ) resp = response.json # will return empty response @@ -385,15 +380,72 @@ def test_get_acquisition_framework(self, users, acquisition_frameworks): response = self.client.get(get_af_url) assert response.status_code == 200 + def test_get_acquisition_frameworks_search_af_name( + self, users, acquisition_frameworks, datasets + ): + set_logged_user_cookie(self.client, users["admin_user"]) + af1 = acquisition_frameworks["af_1"] + af2 = acquisition_frameworks["af_2"] + get_af_url = url_for("gn_meta.get_acquisition_frameworks") + + response = self.client.post(get_af_url, json={"search": af1.acquisition_framework_name}) + + af_list = [af["id_acquisition_framework"] for af in response.json] + assert af1.id_acquisition_framework in af_list + 
assert af2.id_acquisition_framework not in af_list + + def test_get_acquisition_frameworks_search_ds_name( + self, users, acquisition_frameworks, datasets + ): + set_logged_user_cookie(self.client, users["admin_user"]) + ds = datasets["belong_af_1"] + af1 = acquisition_frameworks["af_1"] + af2 = acquisition_frameworks["af_2"] + get_af_url = url_for("gn_meta.get_acquisition_frameworks") + + response = self.client.post(get_af_url, json={"search": ds.dataset_name}) + assert response.status_code == 200 + + af_list = [af["id_acquisition_framework"] for af in response.json] + assert af1.id_acquisition_framework in af_list + assert af2.id_acquisition_framework not in af_list + + def test_get_acquisition_frameworks_search_af_uuid(self, users, acquisition_frameworks): + set_logged_user_cookie(self.client, users["admin_user"]) + + af1 = acquisition_frameworks["af_1"] + + response = self.client.post( + url_for("gn_meta.get_acquisition_frameworks"), + json={"search": str(af1.unique_acquisition_framework_id)[:5]}, + ) + + assert {af["id_acquisition_framework"] for af in response.json} == { + af1.id_acquisition_framework + } + + def test_get_acquisition_frameworks_search_af_date(self, users, acquisition_frameworks): + set_logged_user_cookie(self.client, users["admin_user"]) + + af1 = acquisition_frameworks["af_1"] + + response = self.client.post( + url_for("gn_meta.get_acquisition_frameworks"), + json={"search": af1.acquisition_framework_start_date.strftime("%d/%m/%Y")}, + ) + + expected = {af1.id_acquisition_framework} + assert expected.issubset({af["id_acquisition_framework"] for af in response.json}) + # TODO: check another AF with another start_date (and no DS at search date) is not returned + def test_get_export_pdf_acquisition_frameworks(self, users, acquisition_frameworks): af_id = acquisition_frameworks["own_af"].id_acquisition_framework set_logged_user_cookie(self.client, users["user"]) - response = self.client.get( + response = self.client.post( url_for( - "gn_meta.get_export_pdf_acquisition_frameworks", - id_acquisition_framework=af_id, + "gn_meta.get_export_pdf_acquisition_frameworks", id_acquisition_framework=af_id ) ) @@ -402,10 +454,9 @@ def test_get_export_pdf_acquisition_frameworks(self, users, acquisition_framewor def test_get_export_pdf_acquisition_frameworks_unauthorized(self, acquisition_frameworks): af_id = acquisition_frameworks["own_af"].id_acquisition_framework - response = self.client.get( + response = self.client.post( url_for( - "gn_meta.get_export_pdf_acquisition_frameworks", - id_acquisition_framework=af_id, + "gn_meta.get_export_pdf_acquisition_frameworks", id_acquisition_framework=af_id ) ) @@ -414,48 +465,45 @@ def test_get_export_pdf_acquisition_frameworks_unauthorized(self, acquisition_fr def test_get_acquisition_framework_stats( self, users, acquisition_frameworks, datasets, synthese_data ): - id_af = acquisition_frameworks["orphan_af"].id_acquisition_framework + af = synthese_data["obs1"].dataset.acquisition_framework set_logged_user_cookie(self.client, users["user"]) response = self.client.get( url_for( "gn_meta.get_acquisition_framework_stats", - id_acquisition_framework=id_af, + id_acquisition_framework=af.id_acquisition_framework, ) ) data = response.json assert response.status_code == 200 - assert data["nb_dataset"] == len( - list(filter(lambda ds: ds.id_acquisition_framework == id_af, datasets.values())) - ) + assert data["nb_dataset"] == len(af.datasets) assert data["nb_habitats"] == 0 - assert data["nb_observations"] == len(synthese_data) + obs = [s for s in 
synthese_data.values() if s.dataset.acquisition_framework == af] + assert data["nb_observations"] == len(obs) # Count of taxa : # Loop all the synthese entries, for each synthese # For each entry, take the max between count_min and count_max. And if # not provided: count_min and/or count_max is 1. Since one entry in # synthese is at least 1 taxon - assert data["nb_taxons"] == sum( - max(s.count_min or 1, s.count_max or 1) for s in synthese_data - ) + assert data["nb_taxons"] == sum(max(s.count_min or 1, s.count_max or 1) for s in obs) def test_get_acquisition_framework_bbox(self, users, acquisition_frameworks, synthese_data): - id_af = acquisition_frameworks["orphan_af"].id_acquisition_framework - geom = Point(geometry=to_shape(synthese_data[0].the_geom_4326)) + # this AF contains at least 2 obs at different locations + af = synthese_data["p1_af1"].dataset.acquisition_framework set_logged_user_cookie(self.client, users["user"]) response = self.client.get( - url_for("gn_meta.get_acquisition_framework_bbox", id_acquisition_framework=id_af) + url_for( + "gn_meta.get_acquisition_framework_bbox", + id_acquisition_framework=af.id_acquisition_framework, + ) ) data = response.json assert response.status_code == 200 - assert data["type"] == "Point" - assert data["coordinates"] == [ - pytest.approx(coord, 0.9) for coord in [geom.geometry.x, geom.geometry.y] - ] + assert data["type"] == "Polygon" def test_datasets_permissions(self, app, datasets, users): ds = datasets["own_dataset"] @@ -494,6 +542,7 @@ def test_datasets_permissions(self, app, datasets, users): [ datasets["own_dataset"], datasets["associate_dataset"], + datasets["associate_2_dataset_sensitive"], ] ) assert set(qs.filter_by_scope(3).all()) == set(datasets.values()) @@ -547,20 +596,13 @@ def test_list_datasets(self, users, datasets, acquisition_frameworks): expected_ds = {dataset.id_dataset for dataset in datasets.values()} resp_ds = {ds["id_dataset"] for ds in response.json} assert expected_ds.issubset(resp_ds) + + afs = [acquisition_frameworks["af_1"], acquisition_frameworks["af_2"]] filtered_response = self.client.get( url_for("gn_meta.get_datasets"), - query_string=MultiDict( - [ - ( - "id_acquisition_framework", - acquisition_frameworks["af_1"].id_acquisition_framework, - ), - ( - "id_acquisition_framework", - acquisition_frameworks["af_2"].id_acquisition_framework, - ), - ] - ), + json={ + "id_acquisition_frameworks": [af.id_acquisition_framework for af in afs], + }, ) assert filtered_response.status_code == 200 expected_ds = { @@ -570,6 +612,19 @@ def test_list_datasets(self, users, datasets, acquisition_frameworks): } filtered_ds = {ds["id_dataset"] for ds in filtered_response.json} assert expected_ds.issubset(filtered_ds) + assert all( + dataset["id_acquisition_framework"] in [af.id_acquisition_framework for af in afs] + for dataset in filtered_response.json + ) + + def test_list_datasets_mobile(self, users, datasets, acquisition_frameworks): + set_logged_user_cookie(self.client, users["admin_user"]) + headers = Headers() + headers.add("User-Agent", "okhttp/") + + response = self.client.get(url_for("gn_meta.get_datasets"), headers=headers) + + assert set(response.json.keys()) == {"data"} def test_create_dataset(self, users): response = self.client.post(url_for("gn_meta.create_dataset")) @@ -586,14 +641,158 @@ def test_get_dataset(self, users, datasets): response = self.client.get(url_for("gn_meta.get_dataset", id_dataset=ds.id_dataset)) assert response.status_code == Unauthorized.code - set_logged_user_cookie(self.client, 
users["stranger_user"]) + stranger_user = users["stranger_user"] + set_logged_user_cookie(self.client, stranger_user) response = self.client.get(url_for("gn_meta.get_dataset", id_dataset=ds.id_dataset)) assert response.status_code == Forbidden.code + assert ( + response.json["description"] + == f"User {stranger_user.identifiant} cannot read dataset {ds.id_dataset}" + ) set_logged_user_cookie(self.client, users["associate_user"]) response = self.client.get(url_for("gn_meta.get_dataset", id_dataset=ds.id_dataset)) assert response.status_code == 200 + def test_get_dataset_filter_active(self, users, datasets, module): + set_logged_user_cookie(self.client, users["admin_user"]) + + response = self.client.get( + url_for("gn_meta.get_datasets"), + json={"active": True}, + ) + + expected_ds = {dataset.id_dataset for dataset in datasets.values() if dataset.active} + filtered_ds = {ds["id_dataset"] for ds in response.json} + assert expected_ds.issubset(filtered_ds) + + def test_get_dataset_filter_module_code(self, users, datasets, module): + set_logged_user_cookie(self.client, users["admin_user"]) + + response = self.client.get( + url_for("gn_meta.get_datasets"), + json={"module_code": module.module_code}, + ) + + expected_ds = {datasets["with_module_1"].id_dataset} + filtered_ds = {ds["id_dataset"] for ds in response.json} + assert expected_ds.issubset(filtered_ds) + assert datasets["own_dataset"].id_dataset not in filtered_ds + + def test_get_dataset_search(self, users, datasets, module): + set_logged_user_cookie(self.client, users["admin_user"]) + ds = datasets["with_module_1"] + + response = self.client.get( + url_for("gn_meta.get_datasets"), + json={"search": ds.dataset_name}, + ) + + expected_ds = {ds.id_dataset} + filtered_ds = {ds["id_dataset"] for ds in response.json} + assert expected_ds.issubset(filtered_ds) + assert datasets["own_dataset"].id_dataset not in filtered_ds + + def test_get_dataset_search_uuid(self, users, datasets): + ds = datasets["own_dataset"] + set_logged_user_cookie(self.client, users["admin_user"]) + + response = self.client.get( + url_for("gn_meta.get_datasets"), + json={"search": str(ds.unique_dataset_id)[:5]}, + ) + + expected_ds = {ds.id_dataset} + filtered_ds = {dataset["id_dataset"] for dataset in response.json} + assert expected_ds == filtered_ds + + def test_get_dataset_search_date(self, users, datasets): + ds = datasets["own_dataset"] + set_logged_user_cookie(self.client, users["admin_user"]) + + response = self.client.get( + url_for("gn_meta.get_datasets"), + json={"search": ds.meta_create_date.strftime("%d/%m/%Y")}, + ) + + expected_ds = {ds.id_dataset} + filtered_ds = {dataset["id_dataset"] for dataset in response.json} + assert expected_ds.issubset(filtered_ds) + # FIXME: add a DS to fixture with an unmatching meta_create_date + + def test_get_dataset_search_af_matches(self, users, datasets, acquisition_frameworks): + dataset = datasets["belong_af_1"] + acquisition_framework = [ + af + for af in acquisition_frameworks.values() + if af.id_acquisition_framework == dataset.id_acquisition_framework + ][0] + set_logged_user_cookie(self.client, users["admin_user"]) + + # If Acquisition Framework matches, returns all datasets + response = self.client.get( + url_for("gn_meta.get_datasets"), + json={ + "id_acquisition_frameworks": [dataset.id_acquisition_framework], + "search": acquisition_framework.acquisition_framework_name, + }, + ) + + assert {ds["id_acquisition_framework"] for ds in response.json} == { + ds.id_acquisition_framework for ds in 
acquisition_framework.datasets + } + + def test_get_dataset_search_ds_matches(self, users, datasets, acquisition_frameworks): + dataset = datasets["belong_af_1"] + set_logged_user_cookie(self.client, users["admin_user"]) + + # If Acquisition Framework matches, returns all datasets + response = self.client.get( + url_for("gn_meta.get_datasets"), + json={ + "id_acquisition_frameworks": [dataset.id_acquisition_framework], + "search": dataset.dataset_name, + }, + ) + + assert len(response.json) == 1 + assert response.json[0]["dataset_name"] == dataset.dataset_name + + def test_get_dataset_search_ds_and_af_matches(self, users, datasets, acquisition_frameworks): + dataset = datasets["belong_af_1"] + acquisition_framework = [ + af + for af in acquisition_frameworks.values() + if af.id_acquisition_framework == dataset.id_acquisition_framework + ][0] + set_logged_user_cookie(self.client, users["admin_user"]) + + # If Acquisition Framework matches, returns all datasets + response = self.client.get( + url_for("gn_meta.get_datasets"), + json={ + "id_acquisition_frameworks": [dataset.id_acquisition_framework], + "search": dataset.dataset_name[-4:], + }, + ) + + assert {ds["id_acquisition_framework"] for ds in response.json} == { + ds.id_acquisition_framework for ds in acquisition_framework.datasets + } + + def test_get_dataset_forbidden_ds(self, users, datasets): + ds = datasets["own_dataset"] + self_user = users["self_user"] + set_logged_user_cookie(self.client, self_user) + + response = self.client.get(url_for("gn_meta.get_dataset", id_dataset=ds.id_dataset)) + + assert response.status_code == Forbidden.code + assert ( + response.json["description"] + == f"User {self_user.identifiant} cannot read dataset {ds.id_dataset}" + ) + def test_update_dataset(self, users, datasets): new_name = "thenewname" ds = datasets["own_dataset"] @@ -616,18 +815,7 @@ def test_update_dataset_not_found(self, users, datasets, unexisted_id): def test_update_dataset_forbidden(self, users, datasets): ds = datasets["own_dataset"] - user = users["stranger_user"] - actions_scopes = [{"action": "U", "scope": "2", "module": "METADATA"}] - with db.session.begin_nested(): - for act_scope in actions_scopes: - action = TActions.query.filter_by(code_action=act_scope.get("action", "")).one() - scope = TFilters.query.filter_by(value_filter=act_scope.get("scope", "")).one() - module = TModules.query.filter_by(module_code=act_scope.get("module", "")).one() - permission = CorRoleActionFilterModuleObject( - role=user, action=action, filter=scope, module=module - ) - db.session.add(permission) - set_logged_user_cookie(self.client, user) + set_logged_user_cookie(self.client, users["stranger_user"]) response = self.client.patch(url_for("gn_meta.update_dataset", id_dataset=ds.id_dataset)) @@ -637,26 +825,25 @@ def test_dataset_pdf_export(self, users, datasets): unexisting_id = db.session.query(func.max(TDatasets.id_dataset)).scalar() + 1 ds = datasets["own_dataset"] - response = self.client.get( + response = self.client.post( url_for("gn_meta.get_export_pdf_dataset", id_dataset=ds.id_dataset) ) assert response.status_code == Unauthorized.code set_logged_user_cookie(self.client, users["self_user"]) - response = self.client.get( + response = self.client.post( url_for("gn_meta.get_export_pdf_dataset", id_dataset=unexisting_id) ) assert response.status_code == NotFound.code - response = self.client.get( + response = self.client.post( url_for("gn_meta.get_export_pdf_dataset", id_dataset=ds.id_dataset) ) assert response.status_code == Forbidden.code 
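The PDF-export test above walks the usual authorisation ladder (anonymous request, 401; unknown id, 404; unauthorised user, 403) before the authorised 200 check that follows. That three-step walk recurs throughout these suites; a sketch of how it could be factored into a shared helper (assert_auth_walk and its parameters are an illustrative assumption, not an existing GeoNature utility):

from flask import url_for
from werkzeug.exceptions import Forbidden, Unauthorized

def assert_auth_walk(client, login, endpoint, forbidden_user, allowed_user, **url_kwargs):
    """Exercise an endpoint as anonymous, forbidden and allowed users.

    ``login`` is expected to behave like set_logged_user_cookie(client, user).
    """
    url = url_for(endpoint, **url_kwargs)
    assert client.get(url).status_code == Unauthorized.code  # 401: no cookie set
    login(client, forbidden_user)
    assert client.get(url).status_code == Forbidden.code  # 403: wrong scope
    login(client, allowed_user)
    assert client.get(url).status_code == 200

Keeping the walk in one place would turn the many copies of this pattern (get_dataset, get_export_pdf_dataset, occhab's get_station, and so on) into one-liners.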
set_logged_user_cookie(self.client, users["user"]) - - response = self.client.get( + response = self.client.post( url_for("gn_meta.get_export_pdf_dataset", id_dataset=ds.id_dataset) ) assert response.status_code == 200 @@ -673,6 +860,7 @@ def test_uuid_report(self, users, synthese_data): response = self.client.get(url_for("gn_meta.uuid_report")) assert response.status_code == 200 + @pytest.mark.xfail(reason="FIXME") def test_uuid_report_with_dataset_id( self, synthese_corr, users, datasets, synthese_data, unexisted_id ): @@ -687,19 +875,14 @@ def test_uuid_report_with_dataset_id( url_for("gn_meta.uuid_report"), query_string={"id_dataset": unexisted_id} ) - # Since response is a csv in the string format in bytes, we must - # convert it and read it - for (i, row) in get_csv_from_response(response.data): - for key, val in synthese_corr.items(): - assert row[key] == str(getattr(synthese_data[i], val) or "") - + obs = synthese_data.values() assert response.status_code == 200 - - for (i, row) in get_csv_from_response(response_empty.data): - for key, val in synthese_corr.items(): - assert getattr(synthese_data[i], val) == "" + rows = list(get_csv_from_response(response_empty.data)) + # TODO check result assert response_empty.status_code == 200 + rows = list(get_csv_from_response(response_empty.data)) + assert len(rows) == 1 # header only def test_sensi_report(self, users, datasets): dataset_id = datasets["own_dataset"].id_dataset @@ -745,7 +928,6 @@ def test_get_af_from_id_none(self): get_af_from_id(id_af=id_af, af_list=af_list) def test__get_create_scope(self, app, users): - modcode = "METADATA" with app.test_request_context(headers=logged_user_headers(users["user"])): @@ -857,8 +1039,8 @@ def test_publish_acquisition_framework_no_data( def test_publish_acquisition_framework_with_data( self, mocked_publish_mail, users, acquisition_frameworks, synthese_data ): - set_logged_user_cookie(self.client, users["user"]) - af = acquisition_frameworks["orphan_af"] + set_logged_user_cookie(self.client, users["stranger_user"]) + af = acquisition_frameworks["af_1"] response = self.client.get( url_for( "gn_meta.publish_acquisition_framework", diff --git a/backend/geonature/tests/test_gn_permission.py b/backend/geonature/tests/test_gn_permission.py index 9e3e3b92a8..1b049f998a 100644 --- a/backend/geonature/tests/test_gn_permission.py +++ b/backend/geonature/tests/test_gn_permission.py @@ -1,50 +1,5 @@ import pytest -from flask import request, template_rendered, url_for -from sqlalchemy import func -from werkzeug.exceptions import Forbidden, Unauthorized - -from pypnusershub.db.models import User - -from geonature.core.gn_permissions.models import CorRoleActionFilterModuleObject, TFilters -from geonature.core.gn_permissions.tools import ( - cruved_scope_for_user_in_module, - get_user_from_token_and_raise, -) -from geonature.utils.env import DB - -from .fixtures import filters -from .utils import logged_user_headers, set_logged_user_cookie - - -@pytest.fixture -def captured_templates(app): - recorded = [] - - def record(sender, template, context, **extra): - recorded.append((template, context)) - - template_rendered.connect(record, app) - try: - yield recorded - finally: - template_rendered.disconnect(record, app) - - -@pytest.fixture -def unavailable_filter_id(): - return DB.session.query(func.max(TFilters.id_filter)).scalar() + 1 - - -@pytest.fixture -def unavailable_user_id(): - return DB.session.query(func.max(User.id_role)).scalar() + 1 - - -@pytest.fixture -def deactivate_csrf(monkeypatch, app): - # 
Deactivate the csrf check on the form otherwise it will appear - # with errors on csrf - monkeypatch.setitem(app.config, "WTF_CSRF_ENABLED", False) +from flask import url_for @pytest.mark.usefixtures("client_class") @@ -54,299 +9,3 @@ def test_logout(self): assert response.status_code == 200 assert response.data == b"Logout" - - -@pytest.mark.usefixtures("client_class", "temporary_transaction") -class TestGnPermissionsTools: - """Test of gn_permissions tools functions""" - - def test_user_from_token_and_raise_fail(self): - # no cookie - with pytest.raises(Unauthorized, match="No token"): - resp = get_user_from_token_and_raise(request) - # set a fake cookie - self.client.set_cookie("/", "token", "fake token") - # fake request to set cookie - response = self.client.get( - url_for("gn_permissions_backoffice.filter_list", id_filter_type=4) - ) - with pytest.raises(Unauthorized, match="Token corrupted") as exc_info: - resp = get_user_from_token_and_raise(request) - - def test_get_user_from_token_and_raise(self, app, users): - user = users["user"] - - with app.test_request_context(headers=logged_user_headers(user)): - app.preprocess_request() - resp = get_user_from_token_and_raise(request) - assert isinstance(resp, dict) - assert resp["id_role"] == user.id_role - assert resp["id_organisme"] == user.id_organisme - assert resp["identifiant"] == user.identifiant - - def test_cruved_scope_for_user_in_module(self, users): - admin_user = users["admin_user"] - # get cruved for geonature - cruved, herited = cruved_scope_for_user_in_module( - id_role=admin_user.id_role, module_code="GEONATURE" - ) - assert herited == False - assert cruved == {"C": "3", "R": "3", "U": "3", "V": "3", "E": "3", "D": "3"} - - cruved, herited = cruved_scope_for_user_in_module( - id_role=admin_user.id_role, module_code="GEONATURE", get_id=True - ) - - assert herited == False - assert cruved == {"C": 4, "R": 4, "U": 4, "V": 4, "E": 4, "D": 4} - - -@pytest.mark.usefixtures("client_class", "temporary_transaction") -class TestGnPermissionsView: - def test_get_users(self, users, captured_templates): - """ - Test get page with all roles - """ - admin_user = users["admin_user"] - set_logged_user_cookie(self.client, admin_user) - - response = self.client.get(url_for("gn_permissions_backoffice.users")) - - template, context = captured_templates[0] - assert template.name == "users.html" - assert response.status_code == 200 - users_context = context["users"] - assert b"Liste des roles" in response.data - assert admin_user.id_role in [user["id_role"] for user in users_context] - - def test_get_user_cruveds(self, users, captured_templates): - """ - Test get page with all cruved of a user - """ - admin_user = users["admin_user"] - set_logged_user_cookie(self.client, admin_user) - - response = self.client.get( - url_for("gn_permissions_backoffice.user_cruved", id_role=admin_user.id_role) - ) - - template, context = captured_templates[0] - assert template.name == "cruved_user.html" - assert response.status_code == 200 - user_context = context["user"] - assert user_context["id_role"] == admin_user.id_role - assert len(context["modules"]) != 0 - - def test_get_cruved_scope_form_allowed(self, users): - """ - Test get user cruved form page - """ - admin_user = users["admin_user"] - # with user admin - set_logged_user_cookie(self.client, admin_user) - - response = self.client.get( - url_for( - "gn_permissions_backoffice.permission_form", - id_role=admin_user.id_role, - id_module=1, - id_object=None, - ) - ) - - assert response.status_code == 200 - 
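The backoffice views deleted here relied on the old TFilters / CorRoleActionFilterModuleObject model; the rewritten test_permissions.py further down encodes the new resolution rule instead: a role's effective CRUVED is, action by action, the maximum scope granted either directly or through any of its groups, with a missing permission counting as 0 and an unrestricted one (scope_value None) as 3. A minimal sketch of that merge over plain integer scopes, assuming CRUVED dicts shaped like the ones assert_cruved compares:

def merge_cruved(*cruveds):
    """Per-action max over several CRUVED dicts, e.g. a role's own
    permissions plus those inherited from each of its groups."""
    return {action: max(c.get(action, 0) for c in cruveds) for action in "CRUVED"}

# mirrors test_user_and_group_perm below: group grants -123--, user grants 1-23--
group = {"R": 1, "U": 2, "V": 3}
user = {"C": 1, "U": 2, "V": 3}
assert merge_cruved(group, user) == {"C": 1, "R": 1, "U": 2, "V": 3, "E": 0, "D": 0}

The resulting "112300" is exactly what assert_cruved("g1_r1", "112300", module=module_a) checks in that test.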
- def test_post_cruved_scope_form(self, users): - """ - Test a post an an update on table cor_role_action_filter_module_object - """ - self_user = users["self_user"] - set_logged_user_cookie(self.client, self_user) - # WARNING: wet set ID not code in the form ! - data = {"C": "4", "R": "3", "U": "3", "V": "4", "E": "2", "D": "3"} - - response = self.client.post( - url_for( - "gn_permissions_backoffice.permission_form", - id_role=self_user.id_role, - id_module=0, - id_object=None, - ), - data=data, - content_type="multipart/form-data", - ) - - assert response.status_code == 302 - - def test_get_user_other_permissions(self, users, captured_templates): - """ - Test of view who return the user permissions expect SCOPE - """ - admin_user = users["admin_user"] - set_logged_user_cookie(self.client, admin_user) - - response = self.client.get( - url_for("gn_permissions_backoffice.user_other_permissions", id_role=admin_user.id_role) - ) - - template, context = captured_templates[0] - assert template.name == "user_other_permissions.html" - assert response.status_code == 200 - user_context = context["user"] - assert user_context["id_role"] == admin_user.id_role - - def test_post_other_perm_wrong(self, users): - admin_user = users["admin_user"] - set_logged_user_cookie(self.client, admin_user) - # test wrong parameter in form - wrong_data = {"module": "0", "action": "1", "filter": "truc"} - - response = self.client.post( - url_for( - "gn_permissions_backoffice.other_permissions_form", - id_role=1, - id_filter_type=4, - ), - data=wrong_data, - ) - - # if the post returns a 200, its an error which renders the initial template form (otherwise it should be 302, see other tests) - response.status_code == 200 - - def test_post_other_perm(self, deactivate_csrf, users, filters): - """ - Test post/update a permission (no scope) - """ - - admin_user = users["admin_user"] - set_logged_user_cookie(self.client, admin_user) - one_filter = filters[list(filters.keys())[0]] - valid_data = {"module": "1", "action": "1", "filter": one_filter.id_filter} - - response = self.client.post( - url_for( - "gn_permissions_backoffice.other_permissions_form", - id_role=admin_user.id_role, - id_filter_type=one_filter.id_filter_type, - ), - data=valid_data, - ) - - assert response.status_code == 302 - - # @pytest.mark.usefixtures('deactivate_csrf') seems not working here - def test_update_other_perm(self, deactivate_csrf, users, filters): - admin_user = users["admin_user"] - self_user = users["self_user"] - set_logged_user_cookie(self.client, admin_user) - # Get the permission to update - permission = ( - DB.session.query(CorRoleActionFilterModuleObject) - .filter(CorRoleActionFilterModuleObject.id_role == self_user.id_role) - .first() - ) - id_permission = permission.id_permission - # Take the last filter so that we cannot have a SCOPE filter type - one_filter = filters[list(filters.keys())[-1]] - # change action and filter - update_data = {"module": "1", "action": "1", "filter": one_filter.id_filter} - - response = self.client.post( - url_for( - "gn_permissions_backoffice.other_permissions_form", - id_role=self_user.id_role, - id_filter_type=one_filter.id_filter_type, - id_permission=permission.id_permission, - ), - data=update_data, - ) - - assert response.status_code == 302 - # TODO: maybe a better way to do this - updated_permission = DB.session.query(CorRoleActionFilterModuleObject).get(id_permission) - assert updated_permission.id_action == int(update_data["action"]) - - def test_post_filter(self, deactivate_csrf, users, 
filters): - admin_user = users["admin_user"] - set_logged_user_cookie(self.client, admin_user) - data = { - "label_filter": "Les sonneurs", - "value_filter": "212", - "description_filter": "Filtre de validation des sonneurs", - } - one_filter = filters[list(filters.keys())[0]] - id_filter_type = one_filter.id_filter_type - - response = self.client.post( - url_for("gn_permissions_backoffice.filter_form", id_filter_type=id_filter_type), - data=data, - ) - - assert response.status_code == 302 - - def test_update_filter(self, deactivate_csrf, users, filters): - admin_user = users["admin_user"] - set_logged_user_cookie(self.client, admin_user) - one_filter = filters[list(filters.keys())[0]] - update_data = { - "label_filter": "Les sonneurs bleus", - "value_filter": "213", - "description_filter": "Filtre de validation des sonneurs bleus", - "submit": "Valider", - } - - response = self.client.post( - url_for( - "gn_permissions_backoffice.filter_form", - id_filter_type=one_filter.id_filter_type, - id_filter=one_filter.id_filter, - ), - data=update_data, - ) - - assert response.status_code == 302 - - def test_get_filters_list(self, users, captured_templates, filters): - admin_user = users["admin_user"] - set_logged_user_cookie(self.client, admin_user) - one_filter = filters[list(filters.keys())[0]] - id_filter_type = one_filter.id_filter_type - - response = self.client.get( - url_for("gn_permissions_backoffice.filter_list", id_filter_type=id_filter_type) - ) - - template, context = captured_templates[0] - assert template.name == "filter_list.html" - # Here context["filters"] is of type BaseQuery - filters_gathered = context["filters"].all() - assert id_filter_type in [filt.id_filter_type for filt in filters_gathered] - assert response.status_code == 200 - - def test_delete_filter_fail(self, users, unavailable_filter_id): - admin_user = users["admin_user"] - set_logged_user_cookie(self.client, admin_user) - - response = self.client.post( - url_for("gn_permissions_backoffice.delete_filter", id_filter=unavailable_filter_id) - ) - - assert response.status_code == 404 - - def test_delete_filter(self, users, filters): - admin_user = users["admin_user"] - set_logged_user_cookie(self.client, admin_user) - one_filter = filters[list(filters.keys())[0]] - - response = self.client.post( - url_for("gn_permissions_backoffice.delete_filter", id_filter=one_filter.id_filter) - ) - - # Since there is a redirection : 302 - assert response.status_code == 302 - assert ( - url_for( - "gn_permissions_backoffice.filter_list", id_filter_type=one_filter.id_filter_type - ) - in response.location - ) diff --git a/backend/geonature/tests/test_mtd.py b/backend/geonature/tests/test_mtd.py index d266c6a534..ff765de890 100644 --- a/backend/geonature/tests/test_mtd.py +++ b/backend/geonature/tests/test_mtd.py @@ -1,17 +1,29 @@ import pytest -from geonature.core.gn_meta.mtd.mtd_utils import post_jdd_from_user -from geonature.core.gn_meta.mtd import add_unexisting_digitizer -from geonature.core.gn_meta.models import TDatasets +from geonature.core.gn_meta.mtd import sync_af_and_ds_by_user, MTDInstanceApi +from pypnusershub.db.models import Organisme as BibOrganismes +from geonature.core.gn_meta.models import TAcquisitionFramework +from geonature.utils.config import config + +from geonature.utils.env import db @pytest.mark.usefixtures("client_class", "temporary_transaction") class TestMTD: - @pytest.mark.xfail(reason="must fix CI on http request") # FIXME + @pytest.mark.skip(reason="must fix CI on http request") # FIXME def 
test_mtd(self): - add_unexisting_digitizer(10991) - post_jdd_from_user(id_user=10991) - jdds = TDatasets.query.filter_by(id_digitizer=10991).all() - assert len(jdds) >= 1 - jdd_one = jdds[0] - assert jdd_one.id_digitizer == 10991 + mtd_api = MTDInstanceApi(config["MTD_API_ENDPOINT"], config["MTD"]["ID_INSTANCE_FILTER"]) + af_list = mtd_api.get_af_list() + af = af_list[0] + if not af: + return + af_digitizer_id = af["id_digitizer"] + af_actors = af["actors"] + org_uuid = af_actors[0]["uuid_organism"] + if af_digitizer_id: + sync_af_and_ds_by_user(af_digitizer_id) + jdds = TAcquisitionFramework.query.filter_by(id_digitizer=af_digitizer_id).all() + assert len(jdds) >= 1 + assert db.session.query( + BibOrganismes.query.filter_by(uuid_organisme=org_uuid).exists() + ).scalar() diff --git a/backend/geonature/tests/test_notifications.py b/backend/geonature/tests/test_notifications.py index aaae0767c3..f6b0fe464b 100644 --- a/backend/geonature/tests/test_notifications.py +++ b/backend/geonature/tests/test_notifications.py @@ -4,7 +4,7 @@ import datetime from unittest.mock import patch -from flask import url_for, jsonify, current_app +from flask import url_for from werkzeug.exceptions import Forbidden, Unauthorized, BadRequest from geonature.utils.env import db @@ -16,13 +16,18 @@ NotificationTemplate, ) from geonature.core.notifications import utils -from geonature.tests.fixtures import celery_eager +from geonature.tests.fixtures import celery_eager, notifications_enabled from .utils import set_logged_user_cookie log = logging.getLogger() +@pytest.fixture(scope="class") +def clear_default_notification_rules(): + NotificationRule.query.filter(NotificationRule.id_role.is_(None)).delete() + + @pytest.fixture() def notification_data(users): with db.session.begin_nested(): @@ -98,17 +103,18 @@ def notification_rule(users, rule_method, rule_category): id_role=users["admin_user"].id_role, code_method=rule_method.code, code_category=rule_category.code, + subscribed=True, ) db.session.add(new_notification_rule) return new_notification_rule -@pytest.fixture() -def notifications_enabled(monkeypatch): - monkeypatch.setitem(current_app.config, "NOTIFICATIONS_ENABLED", True) - - -@pytest.mark.usefixtures("client_class", "temporary_transaction") +@pytest.mark.usefixtures( + "client_class", + "temporary_transaction", + "notifications_enabled", + "clear_default_notification_rules", +) class TestNotification: def test_list_database_notification(self, users, notification_data): # Init data for test @@ -237,47 +243,53 @@ def test_list_notification_rules(self, users, notification_rule): data = response.get_json() assert len(data) == 1 - def test_create_rule_ko(self, users, rule_method, rule_category): - # Init data for test - url = "notifications.create_rule" - log.debug("Url d'appel %s", url_for(url)) - - # TEST NO USER - response = self.client.put(url_for(url), content_type="application/json") - assert response.status_code == 401 + def test_update_rule(self, users, rule_method, rule_category): + role = users["user"] + subscribe_url = url_for( + "notifications.update_rule", + code_method=rule_method.code, + code_category=rule_category.code, + subscribe=True, + ) + unsubscribe_url = url_for( + "notifications.update_rule", + code_method=rule_method.code, + code_category=rule_category.code, + subscribe=False, + ) - # TEST CONNECTED USER WITHOUT DATA - set_logged_user_cookie(self.client, users["admin_user"]) - response = self.client.put(url_for(url)) - assert response.status_code == 400 + assert not db.session.query( + 
NotificationRule.query.filter_by( + id_role=role.id_role, + method=rule_method, + category=rule_category, + ).exists() + ).scalar() - # TEST CONNECTED USER WITH DATA BUT WRONG KEY - set_logged_user_cookie(self.client, users["admin_user"]) - data = {"method": rule_method.code, "categorie": rule_category.code} - response = self.client.put(url_for(url), json=data, content_type="application/json") - assert response.status_code == BadRequest.code + response = self.client.post(subscribe_url) + assert response.status_code == Unauthorized.code, response.data - # TEST CONNECTED USER WITH DATA BUT WRONG VALUE - set_logged_user_cookie(self.client, users["admin_user"]) - data = {"code_method": 1, "code_category": rule_category.code} - response = self.client.put(url_for(url), json=data, content_type="application/json") - assert response.status_code == BadRequest.code + set_logged_user_cookie(self.client, role) - def test_create_rule_ok(self, users, rule_method, rule_category): + response = self.client.post(subscribe_url) + assert response.status_code == 200, response.data - url = "notifications.create_rule" - log.debug("Url d'appel %s", url_for(url)) + rule = NotificationRule.query.filter_by( + id_role=role.id_role, + method=rule_method, + category=rule_category, + ).one() + assert rule.subscribed - # TEST SUCCESSFULL RULE CREATION - set_logged_user_cookie(self.client, users["user"]) - data = {"code_method": rule_method.code, "code_category": rule_category.code} - response = self.client.put(url_for(url), json=data, content_type="application/json") + response = self.client.post(unsubscribe_url) assert response.status_code == 200, response.data - newRule = response.get_json() - assert newRule.get("code_method") == rule_method.code - assert newRule.get("code_category") == rule_category.code - assert newRule.get("id_role") == users["user"].id_role + rule = NotificationRule.query.filter_by( + id_role=role.id_role, + method=rule_method, + category=rule_category, + ).one() + assert not rule.subscribed def test_delete_all_rules(self, users, notification_rule): # Init data for test @@ -312,37 +324,7 @@ def test_delete_all_rules(self, users, notification_rule): ).exists() ).scalar() - def test_delete_rule(self, users, notification_rule): - # Init data for test - url = "notifications.delete_rule" - log.debug("Url d'appel %s", url_for(url, id=1)) - - # TEST NO USER - response = self.client.delete(url_for(url, id=1)) - assert response.status_code == Unauthorized.code - - # TEST CONNECTED USER WITHOUT RULE - set_logged_user_cookie(self.client, users["user"]) - response = self.client.delete(url_for(url, id=notification_rule.id)) - assert response.status_code == Forbidden.code - assert db.session.query( - NotificationRule.query.filter_by( - id=notification_rule.id, - ).exists() - ).scalar() - - # TEST CONNECTED USER WITH RULE - set_logged_user_cookie(self.client, users["admin_user"]) - response = self.client.delete(url_for(url, id=notification_rule.id)) - assert response.status_code == 204 - assert not db.session.query( - NotificationRule.query.filter_by( - id=notification_rule.id, - ).exists() - ).scalar() - def test_list_methods(self, users, rule_method): - # Init data for test url = "notifications.list_notification_methods" log.debug("Url d'appel %s", url_for(url)) @@ -359,7 +341,6 @@ def test_list_methods(self, users, rule_method): assert len(data) > 0 def test_list_notification_categories(self, users): - # Init data for test url = "notifications.list_notification_categories" log.debug("Url d'appel %s", 
url_for(url)) @@ -377,7 +358,6 @@ def test_list_notification_categories(self, users): # test only notification insertion in database whitout dispatch def test_send_db_notification(self, users): - result = utils.send_db_notification( users["admin_user"], "test creation", "no templating", "https://geonature.fr" ) @@ -397,6 +377,7 @@ def test_dispatch_notifications_database_with_like( id_role=role.id_role, code_method="DB", code_category=rule_category_1.code, + subscribed=True, ) db.session.add(new_rule) @@ -428,7 +409,7 @@ def test_dispatch_notifications_database_with_like( assert notif.code_status == "UNREAD" def test_dispatch_notifications_database_with_like( - self, users, rule_category, rule_category_1, rule_template, notifications_enabled + self, users, rule_category, rule_category_1, rule_template ): role = users["user"] @@ -438,6 +419,7 @@ def test_dispatch_notifications_database_with_like( id_role=role.id_role, code_method="DB", code_category=rule_category_1.code, + subscribed=True, ) db.session.add(new_rule) @@ -469,7 +451,7 @@ def test_dispatch_notifications_database_with_like( assert notif.code_status == "UNREAD" def test_dispatch_notifications_mail_with_template( - self, users, rule_category, rule_mail_template, notifications_enabled, celery_eager + self, users, rule_category, rule_mail_template, celery_eager ): with db.session.begin_nested(): users["user"].email = "user@geonature.fr" @@ -479,6 +461,7 @@ def test_dispatch_notifications_mail_with_template( id_role=users["user"].id_role, code_method="EMAIL", code_category=rule_category.code, + subscribed=True, ) ) db.session.add( @@ -486,6 +469,7 @@ def test_dispatch_notifications_mail_with_template( id_role=users["admin_user"].id_role, code_method="EMAIL", code_category=rule_category.code, + subscribed=True, ) ) diff --git a/backend/geonature/tests/test_permissions.py b/backend/geonature/tests/test_permissions.py index 11cb12561a..85f15938df 100644 --- a/backend/geonature/tests/test_permissions.py +++ b/backend/geonature/tests/test_permissions.py @@ -3,29 +3,25 @@ import pytest +from flask import g + from geonature.core.gn_commons.models import TModules from geonature.core.gn_permissions.models import ( - TObjects, - TFilters, - TActions, - BibFiltersType, - CorRoleActionFilterModuleObject as Permission, + PermObject, + PermAction, + PermFilterType, + Permission, + PermissionAvailable, ) -from geonature.core.gn_permissions.tools import _get_scopes_by_action +from geonature.core.gn_permissions.tools import get_scopes_by_action, has_any_permissions_by_action from geonature.utils.env import db from pypnusershub.db.models import User -@pytest.fixture +@pytest.fixture(scope="class") def actions(): - return {action.code_action: action for action in TActions.query.all()} - - -@pytest.fixture -def scopes(): - scope_type = BibFiltersType.query.filter_by(code_filter_type="SCOPE").one() - return {f.value_filter: f for f in TFilters.query.filter_by(filter_type=scope_type).all()} + return {action.code_action: action for action in PermAction.query.all()} def create_module(label): @@ -38,29 +34,29 @@ def create_module(label): ) -@pytest.fixture +@pytest.fixture(scope="class") def module_gn(): return TModules.query.filter_by(module_code="GEONATURE").one() -@pytest.fixture +@pytest.fixture(scope="class") def object_all(): - return TObjects.query.filter_by(code_object="ALL").one() + return PermObject.query.filter_by(code_object="ALL").one() -@pytest.fixture +@pytest.fixture(scope="class") def object_a(): - obj = TObjects(code_object="object_a") + 
obj = PermObject(code_object="object_a") return obj -@pytest.fixture +@pytest.fixture(scope="class") def object_b(): - obj = TObjects(code_object="object_b") + obj = PermObject(code_object="object_b") return obj -@pytest.fixture +@pytest.fixture(scope="class") def module_a(): with db.session.begin_nested(): module = create_module("module_a") @@ -68,7 +64,7 @@ def module_a(): return module -@pytest.fixture +@pytest.fixture(scope="class") def module_b(): with db.session.begin_nested(): module = create_module("module_b") @@ -76,7 +72,7 @@ def module_b(): return module -@pytest.fixture +@pytest.fixture() def groups(): groups = { "g1": User(groupe=True), @@ -88,7 +84,7 @@ def groups(): return groups -@pytest.fixture +@pytest.fixture() def roles(groups): roles = { "r1": User(), @@ -118,53 +114,117 @@ def cruved_dict(scopes): } -@pytest.fixture -def permissions(roles, groups, actions, scopes): +def b_cruved(code: str) -> dict: + return {action: bool(int(b)) for action, b in zip("CRUVED", code)} + + +@pytest.fixture() +def permissions(roles, groups, actions, module_gn): roles = ChainMap(roles, groups) - def _permissions(role, cruved, **kwargs): + def _permissions(role, cruved, *, module=module_gn, **kwargs): role = roles[role] + scope_type = PermFilterType.query.filter_by(code_filter_type="SCOPE").one() with db.session.begin_nested(): for a, s in zip("CRUVED", cruved): if s == "-": continue + elif s == "3": + s = None + else: + s = int(s) db.session.add( - Permission(role=role, action=actions[a], filter=scopes[s], **kwargs) + Permission( + role=role, action=actions[a], module=module, scope_value=s, **kwargs + ) ) return _permissions -@pytest.fixture +@pytest.fixture() +def permissions_available(object_all, actions): + def _permissions_available( + module, str_actions, object=object_all, scope=False, sensitivity=False + ): + with db.session.begin_nested(): + for action in str_actions: + if action == "-": + continue + else: + print(actions) + + db.session.add( + PermissionAvailable( + id_module=module.id_module, + id_object=object.id_object, + id_action=actions[action].id_action, + scope_filter=scope, + sensitivity_filter=sensitivity, + ) + ) + + return _permissions_available + + +@pytest.fixture() def assert_cruved(roles): def _assert_cruved(role, cruved, module=None, object=None): role = roles[role] module_code = module.module_code if module else None object_code = object.code_object if object else None - assert _get_scopes_by_action( + assert get_scopes_by_action( id_role=role.id_role, module_code=module_code, object_code=object_code ) == cruved_dict(cruved) return _assert_cruved -@pytest.mark.usefixtures("temporary_transaction") +@pytest.fixture(scope="class") +def g_permissions(): + """ + Fixture to initialize flask g variable + Mandatory if we want to run this test file standalone + """ + g._permissions_by_user = {} + g._permissions = {} + + +@pytest.mark.usefixtures("temporary_transaction", "g_permissions") class TestPermissions: - def test_no_right(self, assert_cruved, module_gn, module_a, object_a): + def test_no_right(self, assert_cruved, module_gn, module_a, object_a, g_permissions): assert_cruved("r1", "000000") assert_cruved("g1_r1", "000000", module_a) assert_cruved("r1", "000000", module_gn, object_a) assert_cruved("r1", "000000", module_a, object_a) - def test_module_perm(self, permissions, assert_cruved, module_gn, module_a): - permissions("r1", "0123--", module=module_a) + def test_module_perm(self, permissions, assert_cruved, module_gn, module_a, module_b): + permissions("r1", 
"1----2", module=module_gn) + permissions("r1", "-1---1", module=module_a) + permissions("r1", "--1---", module=module_b) - assert_cruved("r1", "000000") - assert_cruved("r1", "012300", module_a) + assert_cruved("r1", "100002") + assert_cruved("r1", "010001", module_a) + assert_cruved("r1", "001000", module_b) assert_cruved("r2", "000000", module_a) - def test_group_perm(self, permissions, assert_cruved, module_gn, module_a): - permissions("g1", "0123--", module=module_a) + def test_no_module_no_object_specified( + self, permissions, assert_cruved, module_gn, object_all, module_a, object_a + ): + permissions("r1", "11----", module=module_gn, object=object_all) + permissions("r1", "--11--", module=module_gn, object=object_a) + permissions("r1", "----11", module=module_a, object=object_all) + + assert_cruved("r1", "110000", module=module_gn) + assert_cruved("r1", "110000", object=object_all) + assert_cruved("r1", "110000") + + assert_cruved("r1", "001100", object=object_a) + + assert_cruved("r1", "000011", module=module_a) + + def test_group_inheritance(self, permissions, assert_cruved, module_gn, module_a): + permissions("g1", "-123--", module=module_a) assert_cruved("r1", "000000") assert_cruved("r1", "000000", module_a) @@ -173,19 +233,14 @@ def test_group_perm(self, permissions, assert_cruved, module_gn, module_a): assert_cruved("g2_r1", "000000") assert_cruved("g2_r1", "000000", module_a) - def test_inheritance(self, permissions, assert_cruved, module_gn, module_a, module_b): - permissions("r1", "121---", module=module_gn) - permissions("r1", "012123", module=module_b) + def test_user_and_group_perm(self, permissions, assert_cruved, module_a): + permissions("g1", "-123--", module=module_a) + permissions("g1_r1", "1-23--", module=module_a) - assert_cruved("r1", "121000") - assert_cruved("r1", "121000", module_a) # A inherite GN permissions - assert_cruved("r1", "012123", module_b) # perms on B have precedence - assert_cruved("r2", "000000") - assert_cruved("r2", "000000", module_a) - assert_cruved("r2", "000000", module_b) + assert_cruved("g1_r1", "112300", module=module_a) # max of user and group permission def test_multi_groups_one_perm(self, permissions, assert_cruved, module_a): - permissions("g1", "0123--", module=module_a) + permissions("g1", "-123--", module=module_a) assert_cruved("g1_r1", "012300", module_a) assert_cruved("g12_r1", "012300", module_a) @@ -193,37 +248,11 @@ def test_multi_groups_one_perm(self, permissions, assert_cruved, module_a): def test_multi_groups_multi_perms(self, permissions, assert_cruved, module_a): permissions("g1", "12131-", module=module_a) - permissions("g2", "0121-3", module=module_a) + permissions("g2", "-121-3", module=module_a) assert_cruved("g1_r1", "121310", module_a) assert_cruved("g2_r1", "012103", module_a) - assert_cruved("g12_r1", "122313", module_a) # max of two permissions - - def test_group_inheritance(self, permissions, assert_cruved, module_gn, module_a, module_b): - permissions("g1", "121---", module=module_gn) - permissions("g1", "012123", module=module_b) - - assert_cruved("g1_r1", "121000") - assert_cruved("g1_r1", "121000", module_a) # A inherite GN permissions - assert_cruved("g1_r1", "012123", module_b) # perms on B have precedence - - def test_multi_group_inheritance( - self, permissions, assert_cruved, module_gn, module_a, module_b - ): - permissions("g1", "121-1-", module=module_gn) - permissions("g2", "2101-3", module=module_gn) - permissions("g1", "0121-2", module=module_b) - - assert_cruved("g12_r1", "221113") # max of 
both permissions
-        assert_cruved("g12_r1", "221113", module_a)  # A inherite max of GN permissions
-        assert_cruved("g12_r1", "012112", module_b)  # perms on B have precedence
-
-        assert_cruved("g1_r1", "121010")
-        assert_cruved("g2_r1", "210103")
-        assert_cruved("g1_r1", "121010", module_a)  # A inherite GN permissions
-        assert_cruved("g1_r1", "012112", module_b)  # perms on B have precedence
-        assert_cruved("g2_r1", "210103", module_a)  # A inherite GN permissions
-        assert_cruved("g2_r1", "210103", module_b)  # B inherite GN permissions
+        assert_cruved("g12_r1", "122313", module_a)  # max of both groups' permissions
 
     def test_object_perm(self, permissions, assert_cruved, module_a, module_b, object_a, object_b):
         permissions("r1", "1----2", module=module_a)
@@ -233,6 +262,30 @@ def test_object_perm(self, permissions, assert_cruved, module_a, module_b, objec
         assert_cruved("r1", "000000")
         assert_cruved("r1", "100002", module_a)
-        assert_cruved("r1", "110001", module_a, object_a)
+        assert_cruved("r1", "010001", module_a, object_a)
         assert_cruved("r1", "001000", module_b, object_a)
-        assert_cruved("r1", "100102", module_a, object_b)
+        assert_cruved("r1", "000100", module_a, object_b)
+
+    def test_multiple_scope_with_permissions_available(
+        self, permissions, permissions_available, assert_cruved, module_a
+    ):
+        # scope CRUVED must be 3 even if permissions with filters other than scope are declared
+        permissions_available(module_a, "CRUVED", scope=True, sensitivity=True)
+        permissions("r1", "333333", module=module_a, sensitivity_filter=True)
+        assert_cruved("r1", "333333", module_a)
+
+    def test_has_any_perms(
+        self, permissions, permissions_available, assert_cruved, module_a, roles
+    ):
+        # scope CRUVED must be 3 even if permissions with filters other than scope are declared
+        permissions_available(module_a, "CRUVED", scope=True, sensitivity=True)
+        permissions("r1", "333---", module=module_a, sensitivity_filter=False)
+
+        assert has_any_permissions_by_action(
+            id_role=roles["r1"].id_role, module_code=module_a.module_code
+        ) == b_cruved("111000")
+
+        permissions("r2", "333333", module=module_a, sensitivity_filter=True)
+        assert has_any_permissions_by_action(
+            id_role=roles["r2"].id_role, module_code=module_a.module_code
+        ) == b_cruved("111111")
diff --git a/backend/geonature/tests/test_pr_occhab.py b/backend/geonature/tests/test_pr_occhab.py
new file mode 100644
index 0000000000..4396459717
--- /dev/null
+++ b/backend/geonature/tests/test_pr_occhab.py
@@ -0,0 +1,384 @@
+import pytest
+from copy import deepcopy
+
+from flask import url_for
+from werkzeug.exceptions import Unauthorized, Forbidden, BadRequest
+from shapely.geometry import Point
+import geojson
+from geojson import Feature
+from geoalchemy2.shape import from_shape, to_shape
+import sqlalchemy as sa
+from marshmallow import EXCLUDE
+
+from geonature.utils.env import db
+
+from pypn_habref_api.models import Habref
+from pypnnomenclature.models import TNomenclatures
+from utils_flask_sqla_geo.schema import FeatureSchema, FeatureCollectionSchema
+
+from .utils import set_logged_user_cookie
+from .fixtures import *
+
+occhab = pytest.importorskip("gn_module_occhab")
+
+from gn_module_occhab.models import Station, OccurenceHabitat
+from gn_module_occhab.schemas import StationSchema
+
+
+@pytest.fixture
+def station(datasets):
+    ds = datasets["own_dataset"]
+    p = Point(3.634, 44.399)
+    nomenc = TNomenclatures.query.filter(
+        sa.and_(
+            TNomenclatures.nomenclature_type.has(mnemonique="NAT_OBJ_GEO"),
+            TNomenclatures.mnemonique == "Stationnel",
+        )
+    ).one()
+    s = Station(
+        dataset=ds,
+        comment="Ma super station",
+        geom_4326=from_shape(p, srid=4326),
+        nomenclature_geographic_object=nomenc,
+    )
+    habref = Habref.query.first()
+    nomenc_tech_collect = TNomenclatures.query.filter(
+        sa.and_(
+            TNomenclatures.nomenclature_type.has(mnemonique="TECHNIQUE_COLLECT_HAB"),
+            TNomenclatures.label_fr == "Plongées",
+        )
+    ).one()
+    s.habitats.extend(
+        [
+            OccurenceHabitat(
+                cd_hab=habref.cd_hab,
+                nom_cite="forêt",
+                id_nomenclature_collection_technique=nomenc_tech_collect.id_nomenclature,
+            ),
+            OccurenceHabitat(
+                cd_hab=habref.cd_hab,
+                nom_cite="prairie",
+                id_nomenclature_collection_technique=nomenc_tech_collect.id_nomenclature,
+            ),
+        ]
+    )
+    with db.session.begin_nested():
+        db.session.add(s)
+    return s
+
+
+@pytest.fixture
+def station2(datasets, station):
+    ds = datasets["own_dataset"]
+    p = Point(5, 46)
+    nomenc = TNomenclatures.query.filter(
+        sa.and_(
+            TNomenclatures.nomenclature_type.has(mnemonique="NAT_OBJ_GEO"),
+            TNomenclatures.mnemonique == "Stationnel",
+        )
+    ).one()
+    s = Station(
+        dataset=ds,
+        comment="Ma super station 2",
+        geom_4326=from_shape(p, srid=4326),
+        nomenclature_geographic_object=nomenc,
+    )
+    habref = Habref.query.filter(Habref.cd_hab != station.habitats[0].cd_hab).first()
+    nomenc_tech_collect = TNomenclatures.query.filter(
+        sa.and_(
+            TNomenclatures.nomenclature_type.has(mnemonique="TECHNIQUE_COLLECT_HAB"),
+            TNomenclatures.label_fr == "Plongées",
+        )
+    ).one()
+    s.habitats.extend(
+        [
+            OccurenceHabitat(
+                cd_hab=habref.cd_hab,
+                nom_cite="forêt",
+                id_nomenclature_collection_technique=nomenc_tech_collect.id_nomenclature,
+            ),
+            OccurenceHabitat(
+                cd_hab=habref.cd_hab,
+                nom_cite="prairie",
+                id_nomenclature_collection_technique=nomenc_tech_collect.id_nomenclature,
+            ),
+        ]
+    )
+    with db.session.begin_nested():
+        db.session.add(s)
+    return s
+
+
+@pytest.mark.usefixtures("client_class", "temporary_transaction")
+class TestOcchab:
+    def test_list_stations(self, users, datasets, station):
+        url = url_for("occhab.list_stations")
+
+        response = self.client.get(url)
+        assert response.status_code == Unauthorized.code
+
+        set_logged_user_cookie(self.client, users["noright_user"])
+        response = self.client.get(url)
+        assert response.status_code == Forbidden.code
+
+        set_logged_user_cookie(self.client, users["user"])
+        response = self.client.get(url)
+        assert response.status_code == 200
+        StationSchema(many=True).validate(response.json)
+
+        set_logged_user_cookie(self.client, users["user"])
+        response = self.client.get(url, query_string={"format": "geojson"})
+        assert response.status_code == 200
+        StationSchema(as_geojson=True, many=True).validate(response.json)
+        collection = FeatureCollectionSchema().load(response.json)
+        assert station.id_station in {feature["id"] for feature in collection["features"]}
+
+        response = self.client.get(url, query_string={"format": "geojson", "habitats": "1"})
+        assert response.status_code == 200
+        collection = FeatureCollectionSchema().load(response.json)
+        feature = next(filter(lambda feature: feature["id"], collection["features"]))
+        assert len(feature["properties"]["habitats"]) == len(station.habitats)
+
+    def test_get_station(self, users, station):
+        url = url_for("occhab.get_station", id_station=station.id_station)
+
+        response = self.client.get(url)
+        assert response.status_code == Unauthorized.code
+
+        set_logged_user_cookie(self.client, users["noright_user"])
+        response = self.client.get(url)
+        assert response.status_code == Forbidden.code
+
+        set_logged_user_cookie(self.client, users["stranger_user"])
+        response = self.client.delete(url)
+        assert response.status_code == Forbidden.code
+
+        set_logged_user_cookie(self.client, users["user"])
+        response = self.client.get(url)
+        assert response.status_code == 200
+        response_station = StationSchema(
+            only=["observers", "dataset", "habitats"],
+            as_geojson=True,
+        ).load(
+            response.json,
+            unknown=EXCLUDE,
+        )
+        assert set(response_station.habitats) == set(station.habitats)
+
+    def test_create_station(self, users, datasets, station):
+        url = url_for("occhab.create_or_update_station")
+        point = Point(3.634, 44.399)
+        nomenc_nat_obj_geo = TNomenclatures.query.filter(
+            sa.and_(
+                TNomenclatures.nomenclature_type.has(mnemonique="NAT_OBJ_GEO"),
+                TNomenclatures.mnemonique == "Stationnel",
+            )
+        ).one()
+        nomenc_tech_collect = TNomenclatures.query.filter(
+            sa.and_(
+                TNomenclatures.nomenclature_type.has(mnemonique="TECHNIQUE_COLLECT_HAB"),
+                TNomenclatures.label_fr == "Lidar",
+            )
+        ).one()
+        habref = Habref.query.first()
+        feature = Feature(
+            geometry=point,
+            properties={
+                "id_dataset": datasets["own_dataset"].id_dataset,
+                "id_nomenclature_geographic_object": nomenc_nat_obj_geo.id_nomenclature,
+                "comment": "Une station",
+                "observers": [
+                    {
+                        "id_role": users["user"].id_role,
+                    },
+                ],
+                "habitats": [
+                    {
+                        "cd_hab": habref.cd_hab,
+                        "id_nomenclature_collection_technique": nomenc_tech_collect.id_nomenclature,
+                        "nom_cite": "prairie",
+                    },
+                ],
+            },
+        )
+
+        response = self.client.post(url, data=feature)
+        assert response.status_code == Unauthorized.code
+
+        set_logged_user_cookie(self.client, users["noright_user"])
+        response = self.client.post(url, data=feature)
+        assert response.status_code == Forbidden.code
+
+        set_logged_user_cookie(self.client, users["user"])
+
+        response = self.client.post(url, data=feature)
+        assert response.status_code == 200, response.json
+        new_feature = FeatureSchema().load(response.json)
+        new_station = Station.query.get(new_feature["id"])
+        assert new_station.comment == "Une station"
+        assert to_shape(new_station.geom_4326).equals_exact(Point(3.634, 44.399), 0.01)
+        assert len(new_station.habitats) == 1
+        habitat = new_station.habitats[0]
+        assert habitat.nom_cite == "prairie"
+        assert len(new_station.observers) == 1
+        observer = new_station.observers[0]
+        assert observer.id_role == users["user"].id_role
+
+        # Test a nonexistent dataset id
+        data = deepcopy(feature)
+        data["properties"]["id_dataset"] = -1
+        response = self.client.post(url, data=data)
+        assert response.status_code == 400, response.json
+        assert "unexisting dataset" in response.json["description"].casefold(), response.json
+
+        # Try to modify an existing station
+        data = deepcopy(feature)
+        data["properties"]["id_station"] = station.id_station
+        response = self.client.post(url, data=data)
+        db.session.refresh(station)
+        assert station.comment == "Ma super station"  # original comment
+
+        # Try leveraging observers to modify an existing user
+        data = deepcopy(feature)
+        data["properties"]["observers"][0]["nom_role"] = "nouveau nom"
+        response = self.client.post(url, data=data)
+        assert response.status_code == 200, response.json
+        db.session.refresh(users["user"])
+        assert users["user"].nom_role != "nouveau nom"
+
+        # Try to associate another station's habitat to this station
+        data = deepcopy(feature)
+        id_habitat = station.habitats[0].id_habitat
+        data["properties"]["habitats"][0]["id_habitat"] = id_habitat
+        response = self.client.post(url, data=data)
+        assert response.status_code == 400, response.json
+        assert (
+            "habitat does not belong to this station" in response.json["description"].casefold()
+        ), response.json
+        assert id_habitat in {hab.id_habitat for hab in station.habitats}
+
+    def test_update_station(self, users, station, station2):
+        url = url_for("occhab.create_or_update_station", id_station=station.id_station)
+        feature = StationSchema(as_geojson=True, only=["habitats", "observers", "dataset"]).dump(
+            station
+        )
+
+        response = self.client.post(url, data=feature)
+        assert response.status_code == Unauthorized.code
+
+        set_logged_user_cookie(self.client, users["noright_user"])
+        response = self.client.post(url, data=feature)
+        assert response.status_code == Forbidden.code
+
+        set_logged_user_cookie(self.client, users["stranger_user"])
+        response = self.client.post(url, data=feature)
+        assert response.status_code == Forbidden.code
+
+        set_logged_user_cookie(self.client, users["user"])
+
+        # Try modifying id_station
+        id_station = station.id_station
+        data = deepcopy(feature)
+        data["properties"]["id_station"] = station2.id_station
+        data["properties"]["habitats"] = []
+        assert len(station2.habitats) == 2
+        id_habitats = [hab.id_habitat for hab in station2.habitats]
+        response = self.client.post(url, data=data)
+        assert response.status_code == 400, response.json
+        assert "unmatching id_station" in response.json["description"].casefold(), response.json
+        db.session.refresh(station2)
+        assert len(station2.habitats) == 2
+
+        # Try adding an occurrence
+        cd_hab_list = [occhab.cd_hab for occhab in OccurenceHabitat.query.all()]
+        other_habref = Habref.query.filter(~Habref.cd_hab.in_(cd_hab_list)).first()
+        feature["properties"]["habitats"].append(
+            {
+                "cd_hab": other_habref.cd_hab,
+                "id_nomenclature_collection_technique": feature["properties"]["habitats"][0][
+                    "id_nomenclature_collection_technique"
+                ],
+                "nom_cite": "monde merveilleux",
+            },
+        )
+        response = self.client.post(url, data=feature)
+        assert response.status_code == 200, response.json
+        feature = FeatureSchema().load(response.json)
+        assert len(feature["properties"]["habitats"]) == 3
+
+        # Try modifying an existing occurrence
+        habitat = next(
+            filter(
+                lambda hab: hab["nom_cite"] == "monde merveilleux",
+                feature["properties"]["habitats"],
+            )
+        )
+        habitat["nom_cite"] = "monde fantastique"
+        response = self.client.post(url, data=feature)
+        assert response.status_code == 200, response.json
+        feature = FeatureSchema().load(response.json)
+        assert len(feature["properties"]["habitats"]) == 3
+        habitat = next(
+            filter(
+                lambda hab: hab["id_habitat"] == habitat["id_habitat"],
+                feature["properties"]["habitats"],
+            )
+        )
+        assert habitat["nom_cite"] == "monde fantastique"
+
+        # Try to associate/modify another station's habitat
+        habitat = feature["properties"]["habitats"][0]
+        habitat2 = station2.habitats[0]
+        habitat["id_habitat"] = habitat2.id_habitat
+        response = self.client.post(url, data=feature)
+        assert response.status_code == 400, response.json
+        assert (
+            "habitat does not belong to this station" in response.json["description"].casefold()
+        ), response.json
+        assert habitat2.id_station == station2.id_station
+
+        # # Try to re-create the habitat
+        # data = deepcopy(feature)
+        # del data["properties"]["habitats"][1]["id_habitat"]
+        # response = self.client.post(url, data=data)
+        # assert response.status_code == 200, response.json
+
+        # # Try to associate another station's habitat to this habitat
+        # data = deepcopy(feature)
+        # id_habitat = station2.habitats[0].id_habitat
+        # data["properties"]["habitats"][0]["id_habitat"] = id_habitat
+        # station2_habitats = {hab.id_habitat for hab in station2.habitats}
+        # response = self.client.post(url, data=data)
+        # assert response.status_code == 200, response.json
+        # feature = FeatureSchema().load(response.json)
+        # station = Station.query.get(feature["properties"]["id_station"])
+        # station_habitats = {hab.id_habitat for hab in station.habitats}
+        # assert station_habitats.isdisjoint(station2_habitats)
+
+    def test_delete_station(self, users, station):
+        url = url_for("occhab.delete_station", id_station=station.id_station)
+
+        response = self.client.delete(url)
+        assert response.status_code == Unauthorized.code
+
+        set_logged_user_cookie(self.client, users["noright_user"])
+        response = self.client.delete(url)
+        assert response.status_code == Forbidden.code
+
+        set_logged_user_cookie(self.client, users["stranger_user"])
+        response = self.client.delete(url)
+        assert response.status_code == Forbidden.code
+
+        set_logged_user_cookie(self.client, users["user"])
+        response = self.client.delete(url)
+        assert response.status_code == 204
+        assert not db.session.query(
+            Station.query.filter_by(id_station=station.id_station).exists()
+        ).scalar()
+
+    def test_get_default_nomenclatures(self, users):
+        response = self.client.get(url_for("occhab.get_default_nomenclatures"))
+        assert response.status_code == Unauthorized.code
+        set_logged_user_cookie(self.client, users["user"])
+        response = self.client.get(url_for("occhab.get_default_nomenclatures"))
+        assert response.status_code == 200
diff --git a/backend/geonature/tests/test_pr_occtax.py b/backend/geonature/tests/test_pr_occtax.py
index c8f4127e61..39f718480e 100644
--- a/backend/geonature/tests/test_pr_occtax.py
+++ b/backend/geonature/tests/test_pr_occtax.py
@@ -4,12 +4,11 @@ from datetime import datetime as dt
 from flask import url_for, current_app, g
-from werkzeug.exceptions import Forbidden, NotFound
+from werkzeug.exceptions import Unauthorized, Forbidden, NotFound
 from shapely.geometry import Point
 from geoalchemy2.shape import from_shape
 from sqlalchemy import func
 
-from geonature.core.gn_permissions.models import VUsersPermissions
 from geonature.core.gn_synthese.models import Synthese
 from geonature.utils.env import db
 from geonature.utils.config import config
@@ -160,11 +159,6 @@ def unexisting_id_releve():
     return (db.session.query(func.max(TRelevesOccurrence.id_releve_occtax)).scalar() or 0) + 1
 
 
-@pytest.fixture(scope="function")
-def permission(users):
-    return db.session.query(VUsersPermissions).filter_by(id_role=users["user"].id_role).first()
-
-
 @pytest.mark.usefixtures("client_class", "temporary_transaction", "datasets")
 class TestOcctax:
     def test_get_releve(self, users, releve_occtax):
@@ -237,8 +231,24 @@ def test_post_releve_in_module_bis(self, users, releve_data, module, datasets):
         data = response.json
         assert data["properties"]["id_module"] == module.id_module
 
-    def test_get_defaut_nomenclatures(self):
+    def test_get_defaut_nomenclatures(self, users):
         response = self.client.get(url_for("pr_occtax.getDefaultNomenclatures"))
+        assert response.status_code == Unauthorized.code
+
+        set_logged_user_cookie(self.client, users["user"])
+
+        response = self.client.get(url_for("pr_occtax.getDefaultNomenclatures"))
+        assert response.status_code == 200
+
+    def test_get_one_counting(self, occurrence, users):
+        print(occurrence.cor_counting_occtax)
+        set_logged_user_cookie(self.client, users["admin_user"])
+        response = self.client.get(
+            url_for(
+                "pr_occtax.getOneCounting",
+                id_counting=occurrence.cor_counting_occtax[0].id_counting_occtax,
+            )
+        )
         assert response.status_code == 200
@@ -340,32 +350,4 @@ def test_get_releve_filter_wrong_type(self, users, wrong_value):
 
         response = self.client.get(url_for("pr_occtax.getReleves"), query_string=query_string)
 
-        assert response.status_code == 500
-
-
-@pytest.mark.usefixtures("temporary_transaction")
-class TestReleveRepository:
-    def test_get_one(self, releve_occtax, permission):
-        repository = ReleveRepository(TRelevesOccurrence)
-        repo = repository.get_one(id_releve=releve_occtax.id_releve_occtax, info_user=permission)
-
-        assert repo[0].id_releve_occtax == releve_occtax.id_releve_occtax
-
-    def test_get_one_not_found(self, unexisting_id_releve, permission):
-        repository = ReleveRepository(TRelevesOccurrence)
-
-        with pytest.raises(NotFound):
-            repository.get_one(id_releve=unexisting_id_releve, info_user=permission)
-
-    def test_delete(self, releve_occtax, permission):
-        repository = ReleveRepository(TRelevesOccurrence)
-
-        rel = repository.delete(releve_occtax.id_releve_occtax, permission)
-
-        assert rel.id_releve_occtax == releve_occtax.id_releve_occtax
-
-    def test_delete_not_found(self, unexisting_id_releve):
-        repository = ReleveRepository(TRelevesOccurrence)
-
-        with pytest.raises(NotFound):
-            repository.delete(unexisting_id_releve, permission)
+        assert response.status_code == 500  # FIXME 500 should not be possible
diff --git a/backend/geonature/tests/test_ref_geo.py b/backend/geonature/tests/test_ref_geo.py
deleted file mode 100644
index 982cfb70a7..0000000000
--- a/backend/geonature/tests/test_ref_geo.py
+++ /dev/null
@@ -1,409 +0,0 @@
-import pytest
-import json
-
-from flask import url_for, current_app
-from werkzeug.exceptions import Unauthorized, BadRequest
-from jsonschema import validate as validate_json
-from alembic.migration import MigrationContext
-from alembic.script import ScriptDirectory
-
-from geonature.utils.env import db
-from geonature.core.gn_meta.models import TDatasets, TAcquisitionFramework
-from geonature.utils.env import migrate
-
-from ref_geo.models import BibAreasTypes, LAreas
-from pypnusershub.db.tools import user_to_token
-
-from .fixtures import acquisition_frameworks, datasets
-from .utils import set_logged_user_cookie
-
-
-polygon = {
-    "type": "Polygon",
-    "coordinates": [
-        [
-            [6.058788299560547, 44.740515073054915],
-            [6.039562225341797, 44.7189291865304],
-            [6.075954437255859, 44.70270398212803],
-            [6.119728088378906, 44.70392408044993],
-            [6.13861083984375, 44.73429623703402],
-            [6.099643707275391, 44.75770484489134],
-            [6.058788299560547, 44.740515073054915],
-        ]
-    ],
-}
-
-CITY = "La Motte-en-Champsaur"
-
-
-def has_french_dem():
-    config = migrate.get_config()
-    script = ScriptDirectory.from_config(config)
-    migration_context = MigrationContext.configure(db.session.connection())
-    current_heads = migration_context.get_current_heads()
-    current_heads = set(map(lambda rev: rev.revision, script.get_all_current(current_heads)))
-    return "1715cf31a75d" in current_heads  # ign bd alti
-
-
-@pytest.fixture(scope="function")
-def area_commune():
-    return BibAreasTypes.query.filter_by(type_code="COM").one()
-
-
-@pytest.mark.usefixtures("client_class", "temporary_transaction")
-class TestRefGeo:
-    expected_altitude = pytest.approx({"altitude_min": 984, "altitude_max": 2335}, rel=1e-2)
-    expected_communes = {"La Motte-en-Champsaur", "Saint-Bonnet-en-Champsaur", "Aubessagne"}
-
-    def test_get_geo_info(self):
-        response = self.client.post(
-            url_for("ref_geo.getGeoInfo"),
-            json={
-                "geometry": polygon,
-                "area_type": "COM",
-            },
-        )
-        assert response.status_code == 200
-        communes = {area["area_name"] for area in response.json["areas"]}
-        assert communes == self.expected_communes
-        if not has_french_dem():
-            pytest.xfail("No French DEM")
-        assert response.json["altitude"] == self.expected_altitude
-
-    def test_get_geo_no_payload(self):
-        response = self.client.post(url_for("ref_geo.getGeoInfo"))
-
-        assert response.status_code == 400
-        assert response.json["description"] == "Missing request payload"
-
-    def test_get_geo_no_geom(self):
-        response = self.client.post(url_for("ref_geo.getGeoInfo"), json={})
-
-        assert response.status_code == 400
-        assert response.json["description"] == "Missing 'geometry' in request payload"
-
-    def test_get_geo_info_id_type_error(self):
-        response = self.client.post(
-            url_for("ref_geo.getGeoInfo"),
-            json={
-                "geometry": polygon,
-                "id_type": "aWrongType",
-            },
-        )
-
-        assert response.status_code == 400
-        assert response.json["description"] == "Parameter 'id_type' must be an integer"
-
-    def test_get_geo_info_id_type(self, area_commune):
-        id_type = area_commune.id_type
-
-        response = self.client.post(
-            url_for("ref_geo.getGeoInfo"),
-            json={
-                "geometry": polygon,
-                "id_type": int(id_type),
-            },
-        )
-
-        assert all(area["id_type"] == id_type for area in response.json["areas"])
-
-    def test_get_altitude(self):
-        if not has_french_dem():
-            pytest.xfail("No French DEM")
-        response = self.client.post(
-            url_for("ref_geo.getAltitude"),
-            json={
-                "geometry": polygon,
-            },
-        )
-        assert response.status_code == 200
-        assert response.json == self.expected_altitude
-
-    def test_get_altitude_no_payload(self):
-        if not has_french_dem():
-            pytest.xfail("No French DEM")
-
-        response = self.client.post(url_for("ref_geo.getAltitude"))
-
-        assert response.status_code == 400
-        assert response.json["description"] == "Missing request payload"
-
-    def test_get_altitude_no_geometry(self):
-        if not has_french_dem():
-            pytest.xfail("No French DEM")
-
-        response = self.client.post(url_for("ref_geo.getAltitude"), json={})
-
-        assert response.status_code == 400
-        assert response.json["description"] == "Missing 'geometry' in request payload"
-
-    def test_get_area_intersection(self, area_commune):
-        response = self.client.post(
-            url_for("ref_geo.getAreasIntersection"),
-            json={
-                "geometry": polygon,
-            },
-        )
-        assert response.status_code == 200
-        validate_json(
-            instance=response.json,
-            schema={
-                "type": "object",
-                "patternProperties": {
-                    "[0-9]*": {
-                        "type": "object",
-                        "properties": {
-                            "type_code": {
-                                "type": "string",
-                            },
-                            "type_name": {
-                                "type": "string",
-                            },
-                            "areas": {
-                                "type": "array",
-                                "items": {
-                                    "type": "object",
-                                    "properties": {
-                                        "area_code": {
-                                            "type": "string",
-                                        },
-                                        "area_name": {
-                                            "type": "string",
-                                        },
-                                        "id_area": {
-                                            "type": "integer",
-                                        },
-                                        "id_type": {
-                                            "type": "integer",
-                                        },
-                                    },
-                                    "additionalProperties": False,
-                                },
-                            },
-                        },
-                        "additionalProperties": False,
-                    },
-                },
-                "additionalProperties": False,
-            },
-        )
-
-        communes = {
-            area["area_name"] for area in response.json[str(area_commune.id_type)]["areas"]
-        }
-        assert communes == self.expected_communes
-
-    def test_get_area_intersection_no_payload(self):
-        response = self.client.post(url_for("ref_geo.getAreasIntersection"))
-
-        assert response.status_code == 400
-        assert response.json["description"] == "Missing request payload"
-
-    def test_get_area_intersection_no_geom(self):
-        response = self.client.post(url_for("ref_geo.getAreasIntersection"), json={})
-
-        assert response.status_code == 400
-        assert response.json["description"] == "Missing 'geometry' in request payload"
-
-    def test_get_area_intersection_id_type(self, area_commune):
-        id_type = area_commune.id_type
-
-        response = self.client.post(
-            url_for("ref_geo.getAreasIntersection"), json={"geometry": polygon, "id_type": id_type}
-        )
-        assert response.status_code == 200
-        resp_json = response.json
-        for area_type in resp_json.values():
-            for area in area_type["areas"]:
-                assert area["id_type"] == id_type
-
-    def test_get_area_intersection_id_type_wrong(self):
-        response = self.client.post(
-            url_for("ref_geo.getAreasIntersection"),
-            json={"geometry": polygon, "id_type": "wrongType"},
-        )
-        assert response.status_code == 400
-        assert response.json["description"] == "Parameter 'id_type' must be an integer"
-
-    def test_get_area_intersection_area_type(self, area_commune):
-        area_type = area_commune.type_code
-
-        response = self.client.post(
-            url_for("ref_geo.getAreasIntersection"),
-            json={"geometry": polygon, "area_type": area_type},
-        )
-
-        assert response.status_code == 200
-        resp_json = response.json
-        assert all(a_type["type_code"] == area_type for a_type in resp_json.values())
-
-    def test_get_municipalities(self):
-        response = self.client.get(url_for("ref_geo.get_municipalities"))
-        assert response.status_code == 200
-
-    def test_get_municipalities_nom_com(self):
-        response = self.client.get(
-            url_for("ref_geo.get_municipalities"), query_string={"nom_com": CITY}
-        )
-
-        assert response.status_code == 200
-        assert response.json[0]["nom_com"] == CITY
-
-    def test_get_areas(self):
-        response = self.client.get(url_for("ref_geo.get_areas"))
-        assert response.status_code == 200
-
-    def test_get_areas_enable_wrong(self):
-        response = self.client.get(url_for("ref_geo.get_areas"), query_string={"enable": "wrong"})
-
-        assert response.status_code == 400
-        assert (
-            response.json["message"]
-            == "Le paramètre 'enable' accepte seulement les valeurs: true, false, all."
-        )
-
-    def test_get_areas_enable_false(self):
-        response = self.client.get(url_for("ref_geo.get_areas"), query_string={"enable": False})
-
-        assert response.status_code == 200
-        assert all(not area["enable"] for area in response.json)
-
-    def test_get_areas_enable_true(self):
-        response = self.client.get(url_for("ref_geo.get_areas"), query_string={"enable": True})
-
-        assert response.status_code == 200
-        assert all(area["enable"] for area in response.json)
-
-    def test_get_areas_id_type(self, area_commune):
-        id_type_commune = area_commune.id_type
-
-        response = self.client.get(
-            url_for("ref_geo.get_areas"), query_string={"id_type": id_type_commune}
-        )
-
-        assert response.status_code == 200
-        assert all(area["id_type"] == id_type_commune for area in response.json)
-
-    def test_get_areas_type_code(self, area_commune):
-        type_code = area_commune.type_code
-
-        response = self.client.get(
-            url_for("ref_geo.get_areas"), query_string={"type_code": type_code}
-        )
-
-        assert response.status_code == 200
-        assert all(area["id_type"] == area_commune.id_type for area in response.json)
-
-    def test_get_areas_area_name(self):
-        response = self.client.get(url_for("ref_geo.get_areas"), query_string={"area_name": CITY})
-
-        assert response.status_code == 200
-        assert response.json[0]["area_name"] == CITY
-
-    def test_get_areas_as_geojson(self, area_commune):
-        """
-        This test can't try to get only one commune
-        Example : if first commune is Aast, we can get many result with ilike operator
-        """
-        type_code = area_commune.type_code
-        id_type = area_commune.id_type
-        first_comm = LAreas.query.filter(LAreas.id_type == id_type).first()
-        # will test many responses are return
-        response = self.client.get(
-            url_for("ref_geo.get_areas"),
-            query_string={"type_code": type_code, "format": "geojson"},
-        )
-        assert response.status_code == 200
-        assert len(response.json) > 0
-        result_comm = response.json[0]
-        result_type = result_comm["properties"]["area_type"]["type_code"]
-        assert result_comm["geometry"] is not None
-        assert result_type == type_code
-        # will test only one response with correct format
-        response = self.client.get(
-            url_for("ref_geo.get_areas"),
-            query_string={
-                "type_code": type_code,
-                "format": "geojson",
-                "area_name": first_comm.area_name,
-            },
-        )
-        result = response.json[0]
-        assert result["geometry"] is not None
-        assert result["properties"]["id_type"] == first_comm.id_type
-
-    def test_get_area_size(self):
-        response = self.client.post(
-            url_for("ref_geo.get_area_size"),
-            json={
-                "geometry": polygon,
-            },
-        )
-        assert response.status_code == 200
-        assert response.json == pytest.approx(30526916, rel=1e-3)
-
-    def test_get_area_size_no_payload(self):
-        response = self.client.post(url_for("ref_geo.get_area_size"))
-
-        assert response.status_code == 400
-        assert response.json["description"] == "Missing request payload"
-
-    def test_get_area_size_no_geom(self):
-        response = self.client.post(url_for("ref_geo.get_area_size"), json={})
-
-        assert response.status_code == 400
-        assert response.json["description"] == "Missing 'geometry' in request payload"
-
-    def test_get_types(self):
-        response = self.client.get(url_for("ref_geo.get_area_types"))
-        print(response.json)
-        assert response.status_code == 200
-
-    def test_get_types_by_code(self, area_commune):
-        type_code = area_commune.type_code
-
-        # GET area type with fake code
-        response = self.client.get(
-            url_for("ref_geo.get_area_types"), query_string={"code": type_code + "_FOO"}
-        )
-        assert response.status_code == BadRequest.code
-
-        # GET area type with correct code
-        response = self.client.get(
-            url_for("ref_geo.get_area_types"), query_string={"code": type_code}
-        )
-        area = response.json[0]
-        assert response.status_code == 200
-        assert area["type_name"] == area_commune.type_name
-        assert area["type_code"] == area_commune.type_code
-        assert area["id_type"] == area_commune.id_type
-
-    def test_get_types_by_name(self, area_commune):
-        type_name = area_commune.type_name
-        # GET area type with correct name
-        response = self.client.get(
-            url_for("ref_geo.get_area_types"), query_string={"name": area_commune.type_name}
-        )
-        assert response.status_code == 200
-        assert response.json[0]["type_code"] == area_commune.type_code
-
-        # GET area type with fake name
-        response = self.client.get(
-            url_for("ref_geo.get_area_types"), query_string={"name": type_name + "_FOO"}
-        )
-        assert response.status_code == 200
-        assert len(response.json) == 0
-
-        # GET area type with exact name
-        response = self.client.get(
-            url_for("ref_geo.get_area_types"), query_string={"name": type_name}
-        )
-        assert response.status_code == 200
-        assert response.json[0]["type_code"] == area_commune.type_code
-
-        # GET area type with part of name
-        response = self.client.get(
-            url_for("ref_geo.get_area_types"), query_string={"name": type_name[:1]}
-        )
-        assert response.status_code == 200
-        assert len(response.json) > 0
diff --git a/backend/geonature/tests/test_reports.py b/backend/geonature/tests/test_reports.py
index ae5116c904..fdcb54e124 100644
--- a/backend/geonature/tests/test_reports.py
+++ b/backend/geonature/tests/test_reports.py
@@ -1,17 +1,50 @@
-import pytest
 import json
+import pytest
 from flask import url_for
 from sqlalchemy import func
-from werkzeug.exceptions import Forbidden, BadRequest, Unauthorized, NotFound
+from werkzeug.exceptions import BadRequest, Forbidden, NotFound, Unauthorized
 
+from geonature.core.gn_synthese.models import BibReportsTypes, Synthese, TReport
+from geonature.core.notifications.models import Notification, NotificationRule
 from geonature.utils.env import db
-from geonature.core.gn_synthese.models import TReport, BibReportsTypes, Synthese
 
 from .fixtures import *
 from .utils import logged_user_headers, set_logged_user_cookie
 
 
+def add_notification_rule(user):
+    with db.session.begin_nested():
+        new_notification_rule = NotificationRule(
+            id_role=user.id_role,
+            code_method="DB",
+            code_category="OBSERVATION-COMMENT",
+            subscribed=True,
+        )
+        db.session.add(new_notification_rule)
+    return new_notification_rule
+
+
+@pytest.fixture()
+def admin_notification_rule(users):
+    return add_notification_rule(users["admin_user"])
+
+
+@pytest.fixture()
+def associate_user_notification_rule(users):
+    return add_notification_rule(users["associate_user"])
+
+
+@pytest.fixture()
+def user_notification_rule(users):
+    return add_notification_rule(users["user"])
+
+
+@pytest.fixture()
+def self_user_notification_rule(users):
+    return add_notification_rule(users["self_user"])
+
+
 @pytest.mark.usefixtures("client_class", "temporary_transaction")
 class TestReports:
     def test_create_report(self, synthese_data, users):
@@ -85,10 +118,8 @@ def test_list_reports(self, reports_data, synthese_data, users):
         # TEST GET WITHOUT REQUIRED ID SYNTHESE
         set_logged_user_cookie(self.client, users["admin_user"])
         response = self.client.get(url_for(url))
-        ids = []
-        for el in synthese_data:
-            ids.append(el.id_synthese)
         assert response.status_code == NotFound.code
+        ids = [s.id_synthese for s in synthese_data.values()]
 
         # TEST GET BY ID SYNTHESE
         response = self.client.get(
             url_for(url, idSynthese=ids[0], idRole=users["admin_user"].id_role, type="discussion")
@@ -107,3 +138,144 @@ def test_list_reports(self, reports_data, synthese_data, users):
         # NO TYPE - TYPE IS NOT REQUIRED
         response = self.client.get(url_for(url, idSynthese=ids[1]))
         assert response.status_code == 200
+
+
+@pytest.mark.usefixtures("client_class", "notifications_enabled", "temporary_transaction")
+class TestReportsNotifications:
+    def post_comment(self, synthese, user):
+        """Post a comment on a synthese row as a user"""
+        set_logged_user_cookie(self.client, user)
+        url = "gn_synthese.create_report"
+        id_synthese = synthese.id_synthese
+        data = {"item": id_synthese, "content": "comment 4", "type": "discussion"}
+        return self.client.post(url_for(url), data=data)
+
+    def test_report_notification_on_own_obs(
+        self,
+        synthese_data,
+        users,
+        admin_notification_rule,
+        user_notification_rule,
+        self_user_notification_rule,
+    ):
+        """
+        Given:
+        - user and admin_user are observers of a synthese data
+        - self_user is the digitiser
+        When
+        - user adds a comment
+        Then
+        - admin_user and self_user receive a notification
+        - user does not receive a notification since user wrote the comment
+        """
+        synthese = synthese_data["obs1"]
+
+        response = self.post_comment(synthese=synthese, user=users["user"])
+
+        # Just test that the comment has been sent
+        assert response.status_code == 204
+
+        # Check that admin_user (observer) and self_user (digitiser) are notified
+        id_roles = {user.id_role for user in (users["admin_user"], users["self_user"])}
+        notifications = Notification.query.filter(Notification.id_role.in_(id_roles)).all()
+
+        assert {notification.id_role for notification in notifications} == id_roles
+        assert all(synthese.nom_cite in notif.content for notif in notifications)
+        # Check that user is not notified since they posted the comment
+        assert (
+            Notification.query.filter(Notification.id_role == users["user"].id_role).first()
+            is None
+        )
+
+    def test_report_notification_on_not_own_obs(
+        self,
+        synthese_data,
+        users,
+        admin_notification_rule,
+        user_notification_rule,
+        self_user_notification_rule,
+        associate_user_notification_rule,
+    ):
+        """
+        Given:
+        - user and admin_user are observers of a synthese data
+        - self_user is the digitiser
+        When
+        - associate_user adds a comment
+        Then
+        - user, admin_user and self_user receive a notification
+        - associate_user does not receive a notification since associate_user wrote the comment
+        """
+
+        synthese = synthese_data["obs1"]
+        response = self.post_comment(synthese=synthese, user=users["associate_user"])
+
+        # Just test that the comment has been sent
+        assert response.status_code == 204
+
+        # Check that user, admin_user (observers) and self_user (digitiser) are notified
+        id_roles = {
+            user.id_role for user in (users["user"], users["admin_user"], users["self_user"])
+        }
+        notifications = Notification.query.filter(Notification.id_role.in_(id_roles)).all()
+
+        assert {notification.id_role for notification in notifications} == id_roles
+        assert all(synthese.nom_cite in notif.content for notif in notifications)
+        # Also check that associate_user is not notified for the comment they posted
+        assert (
+            Notification.query.filter(
+                Notification.id_role == users["associate_user"].id_role
+            ).first()
+            is None
+        )
+
+    def test_report_notification_on_obs_commented(
+        self,
+        synthese_data,
+        users,
+        associate_user_notification_rule,
+        admin_notification_rule,
+        user_notification_rule,
+        self_user_notification_rule,
+    ):
+        """
+        Given:
+        - user and admin_user are observers of a synthese data
+        - self_user is the digitiser
+        When
+        - associate_user adds a comment
+        - admin_user adds a comment afterwards
+        Then
+        - after the first comment is posted, associate_user does not receive a notification
+        - user, admin_user and self_user receive a notification since associate_user commented
+        - associate_user receives a notification since admin_user commented on the observation
+          associate_user commented on
+        """
+        synthese = synthese_data["obs1"]
+
+        # Post first comment so that associate_user can be notified on future comments
+        _ = self.post_comment(synthese=synthese, user=users["associate_user"])
+        # Check that associate_user is not notified (just in case)
+        assert (
+            Notification.query.filter(
+                Notification.id_role == users["associate_user"].id_role
+            ).first()
+            is None
+        )
+        # Post second comment to notify associate_user on future comments
+        _ = self.post_comment(synthese=synthese, user=users["admin_user"])
+
+        # Check that all these roles are notified. Careful, even admin_user is notified
+        # because of the first comment
+        user_roles = {
+            user.id_role
+            for user in (
+                users["associate_user"],
+                users["admin_user"],
+                users["self_user"],
+                users["user"],
+            )
+        }
+        notifications = Notification.query.filter(Notification.id_role.in_(user_roles)).all()
+
+        assert {notification.id_role for notification in notifications} == user_roles
diff --git a/backend/geonature/tests/test_sensitivity.py b/backend/geonature/tests/test_sensitivity.py
index 3e9f4501f4..27ed8b39c8 100644
--- a/backend/geonature/tests/test_sensitivity.py
+++ b/backend/geonature/tests/test_sensitivity.py
@@ -21,7 +21,7 @@
 from pypnnomenclature.models import TNomenclatures, BibNomenclaturesTypes
 
 
-@pytest.fixture(scope="class")
+@pytest.fixture(scope="function")
 def clean_all_sensitivity_rules():
     db.session.execute(sa.delete(CorSensitivityCriteria))
     db.session.execute(sa.delete(cor_sensitivity_area))
diff --git a/backend/geonature/tests/test_synthese.py b/backend/geonature/tests/test_synthese.py
index 00745f7bcf..b5c3089557 100644
--- a/backend/geonature/tests/test_synthese.py
+++ b/backend/geonature/tests/test_synthese.py
@@ -1,23 +1,32 @@
 import pytest
 import json
+import datetime
 import itertools
+from collections import Counter
 
 from flask import url_for, current_app
 from sqlalchemy import func
 from werkzeug.exceptions import Forbidden, BadRequest, Unauthorized
 from jsonschema import validate as validate_json
-from geoalchemy2.shape import to_shape
-from geojson import Point
+from geoalchemy2.shape import to_shape, from_shape
+from shapely.geometry import Point
 
 from geonature.utils.env import db
 from geonature.core.gn_meta.models import TDatasets
-from geonature.core.gn_synthese.models import Synthese, TSources
+from geonature.core.gn_synthese.models import Synthese, TSources, VSyntheseForWebApp
 from pypnusershub.tests.utils import logged_user_headers, set_logged_user_cookie
 from ref_geo.models import BibAreasTypes, LAreas
 from apptax.tests.fixtures import noms_example, attribut_example
+from apptax.taxonomie.models import Taxref
+
+
+from pypnusershub.db.models import User
+from pypnnomenclature.models import TNomenclatures, BibNomenclaturesTypes
 
 from .fixtures import *
+from .fixtures import create_synthese
 from .utils import jsonschema_definitions
 
@@ -39,12 +48,6 @@ def unexisted_id_source():
     return db.session.query(func.max(TSources.id_source)).scalar() + 1
 
 
-@pytest.fixture()
-def bbox_geom(synthese_data):
- """used to check bbox""" - return Point(geometry=to_shape(synthese_data[0].the_geom_4326)) - - @pytest.fixture() def taxon_attribut(noms_example, attribut_example, synthese_data): """ @@ -60,53 +63,89 @@ def taxon_attribut(noms_example, attribut_example, synthese_data): return c +@pytest.fixture() +def synthese_for_observers(source, datasets): + """ + Seems redondant with synthese_data fixture, but synthese data + insert in cor_observers_synthese and run a trigger which override the observers_txt field + """ + now = datetime.datetime.now() + taxon = Taxref.query.first() + point = Point(5.486786, 42.832182) + geom = from_shape(point, srid=4326) + with db.session.begin_nested(): + for obs in ["Vincent", "Camille", "Camille, Xavier"]: + db.session.add( + Synthese( + id_source=source.id_source, + nom_cite=taxon.lb_nom, + cd_nom=taxon.cd_nom, + dataset=datasets["own_dataset"], + date_min=now, + date_max=now, + observers=obs, + the_geom_4326=geom, + the_geom_point=geom, + the_geom_local=func.st_transform(geom, 2154), + ) + ) + + synthese_properties = { "type": "object", "properties": { - "id": {"type": "number"}, - "cd_nom": {"type": "number"}, - "count_min_max": {"type": "string"}, - "dataset_name": {"type": "string"}, - "date_min": {"type": "string"}, - "entity_source_pk_value": { - "oneOf": [ - {"type": "null"}, - {"type": "string"}, - ], - }, - "lb_nom": {"type": "string"}, - "nom_vern_or_lb_nom": {"type": "string"}, - "unique_id_sinp": { - "type": "string", - "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$", - }, - "observers": { - "oneOf": [ - {"type": "null"}, - {"type": "string"}, - ], - }, - "url_source": { - "oneOf": [ - {"type": "null"}, - {"type": "string"}, - ], + "observations": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": "number"}, + "cd_nom": {"type": "number"}, + "count_min_max": {"type": "string"}, + "dataset_name": {"type": "string"}, + "date_min": {"type": "string"}, + "entity_source_pk_value": { + "oneOf": [ + {"type": "null"}, + {"type": "string"}, + ], + }, + "lb_nom": {"type": "string"}, + "nom_vern_or_lb_nom": {"type": "string"}, + "unique_id_sinp": { + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$", + }, + "observers": { + "oneOf": [ + {"type": "null"}, + {"type": "string"}, + ], + }, + "url_source": { + "oneOf": [ + {"type": "null"}, + {"type": "string"}, + ], + }, + }, + "required": [ # obligatoire pour le fonctionement du front + "id", + "cd_nom", + "url_source", + "entity_source_pk_value", + ], + # "additionalProperties": False, + }, }, }, - "required": [ # obligatoire pour le fonctionement du front - "id", - "cd_nom", - "url_source", - "entity_source_pk_value", - ], - "additionalProperties": False, } @pytest.mark.usefixtures("client_class", "temporary_transaction") class TestSynthese: def test_synthese_scope_filtering(self, app, users, synthese_data): - all_ids = {s.id_synthese for s in synthese_data} + all_ids = {s.id_synthese for s in synthese_data.values()} sq = Synthese.query.with_entities(Synthese.id_synthese).filter( Synthese.id_synthese.in_(all_ids) ) @@ -114,13 +153,15 @@ def test_synthese_scope_filtering(self, app, users, synthese_data): app.preprocess_request() assert sq.filter_by_scope(0).all() == [] - def test_list_sources(self, source): + def test_list_sources(self, source, users): + set_logged_user_cookie(self.client, users["self_user"]) response = self.client.get(url_for("gn_synthese.get_sources")) 
assert response.status_code == 200 data = response.get_json() assert len(data) > 0 - def test_get_defaut_nomenclatures(self): + def test_get_defaut_nomenclatures(self, users): + set_logged_user_cookie(self.client, users["self_user"]) response = self.client.get(url_for("gn_synthese.getDefaultsNomenclatures")) assert response.status_code == 200 @@ -139,81 +180,177 @@ def test_get_observations_for_web(self, users, synthese_data, taxon_attribut): r = self.client.get(url) assert r.status_code == 200 + print(r.json) validate_json(instance=r.json, schema=schema) # test on synonymy and taxref attrs - query_string = { - "cd_ref": taxon_attribut.bib_nom.cd_ref, - "taxhub_attribut_{}".format( - taxon_attribut.bib_attribut.id_attribut - ): taxon_attribut.valeur_attribut, + filters = { + "cd_ref": [taxon_attribut.bib_nom.cd_ref], + "taxhub_attribut_{}".format(taxon_attribut.bib_attribut.id_attribut): [ + taxon_attribut.valeur_attribut + ], } - r = self.client.get(url, query_string=query_string) + r = self.client.post(url, json=filters) assert r.status_code == 200 validate_json(instance=r.json, schema=schema) - assert len(r.json["features"]) == 1 - assert r.json["features"][0]["properties"]["cd_nom"] == taxon_attribut.bib_nom.cd_nom - - # test geometry filters - com_type = BibAreasTypes.query.filter_by(type_code="COM").one() - vesdun = LAreas.query.filter_by(area_type=com_type, area_name="Vesdun").one() - query_string = { - "geoIntersection": """ - POLYGON((2.313844516928274 46.62891246017805,2.654420688803274 46.62891246017805,2.654420688803274 46.415359531851166,2.313844516928274 46.415359531851166,2.313844516928274 46.62891246017805)) - """, - f"area_{com_type.id_type}": vesdun.id_area, + assert len(r.json["features"]) > 0 + for feature in r.json["features"]: + assert feature["properties"]["cd_nom"] == taxon_attribut.bib_nom.cd_nom + + # test intersection filters + filters = { + "geoIntersection": { + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [5.852731, 45.7775], + [5.852731, 44.820481], + [7.029224, 44.820481], + [7.029224, 45.7775], + [5.852731, 45.7775], + ], + ], + }, + "properties": {}, + }, } - r = self.client.get(url, query_string=query_string) + r = self.client.post(url, json=filters) assert r.status_code == 200 validate_json(instance=r.json, schema=schema) - assert len(r.json["features"]) >= 2 + assert {synthese_data[k].id_synthese for k in ["p1_af1", "p1_af2"]}.issubset( + {f["properties"]["id"] for f in r.json["features"]} + ) + assert {synthese_data[k].id_synthese for k in ["p2_af1", "p2_af2"]}.isdisjoint( + {f["properties"]["id"] for f in r.json["features"]} + ) # test geometry filter with circle radius - query_string = { - "geoIntersection": "POINT ({} {})".format( - current_app.config["MAPCONFIG"]["CENTER"][1] + 0.01, - current_app.config["MAPCONFIG"]["CENTER"][0] - 0.10, - ), - "radius": "20000", # 20km + filters = { + "geoIntersection": { + "type": "Feature", + "geometry": { + "type": "Point", + "coordinates": [5.92, 45.56], + }, + "properties": { + "radius": "20000", # 20km + }, + }, } - r = self.client.get(url, query_string=query_string) + r = self.client.post(url, json=filters) assert r.status_code == 200 validate_json(instance=r.json, schema=schema) - assert len(r.json["features"]) >= 2 + assert {synthese_data[k].id_synthese for k in ["p1_af1", "p1_af2"]}.issubset( + {f["properties"]["id"] for f in r.json["features"]} + ) + assert {synthese_data[k].id_synthese for k in ["p2_af1", "p2_af2"]}.isdisjoint( + {f["properties"]["id"] for f in 
r.json["features"]} + ) - # test organisms and multiple same arg in query string - id_organisme = users["self_user"].id_organisme - r = self.client.get(f"{url}?id_organism={id_organisme}&id_organism=2") + # test ref geo area filter + com_type = BibAreasTypes.query.filter_by(type_code="COM").one() + chambery = LAreas.query.filter_by(area_type=com_type, area_name="Chambéry").one() + filters = {f"area_{com_type.id_type}": [chambery.id_area]} + r = self.client.post(url, json=filters) assert r.status_code == 200 validate_json(instance=r.json, schema=schema) - assert len(r.json["features"]) >= 2 + assert {synthese_data[k].id_synthese for k in ["p1_af1", "p1_af2"]}.issubset( + {f["properties"]["id"] for f in r.json["features"]} + ) + assert {synthese_data[k].id_synthese for k in ["p2_af1", "p2_af2"]}.isdisjoint( + {f["properties"]["id"] for f in r.json["features"]} + ) + + # test organism + filters = { + "id_organism": [users["self_user"].id_organisme], + } + r = self.client.post(url, json=filters) + assert r.status_code == 200 + validate_json(instance=r.json, schema=schema) + assert len(r.json["features"]) >= 2 # FIXME # test status lr - query_string = {"regulations_protection_status": ["REGLLUTTE"]} - r = self.client.get(url, query_string=query_string) + filters = {"regulations_protection_status": ["REGLLUTTE"]} + r = self.client.get(url, json=filters) assert r.status_code == 200 # test status znieff - query_string = {"znief_protection_status": True} - r = self.client.get(url, query_string=query_string) + filters = {"znief_protection_status": True} + r = self.client.get(url, json=filters) assert r.status_code == 200 # test status protection - query_string = {"protections_protection_status": ["PN"]} - r = self.client.get(url, query_string=query_string) + filters = {"protections_protection_status": ["PN"]} + r = self.client.get(url, json=filters) + assert r.status_code == 200 + # test status protection and znieff + filters = {"protections_protection_status": ["PN"], "znief_protection_status": True} + r = self.client.get(url, json=filters) assert r.status_code == 200 # test LR - query_string = {"worldwide_red_lists": ["LC"]} - r = self.client.get(url, query_string=query_string) + filters = {"worldwide_red_lists": ["LC"]} + r = self.client.get(url, json=filters) assert r.status_code == 200 - query_string = {"european_red_lists": ["LC"]} - r = self.client.get(url, query_string=query_string) + filters = {"european_red_lists": ["LC"]} + r = self.client.get(url, json=filters) assert r.status_code == 200 - query_string = {"national_red_lists": ["LC"]} - r = self.client.get(url, query_string=query_string) + filters = {"national_red_lists": ["LC"]} + r = self.client.get(url, json=filters) assert r.status_code == 200 - query_string = {"regional_red_lists": ["LC"]} - r = self.client.get(url, query_string=query_string) + filters = {"regional_red_lists": ["LC"]} + r = self.client.get(url, json=filters) assert r.status_code == 200 + def test_get_observations_for_web_filter_comment(self, users, synthese_data, taxon_attribut): + set_logged_user_cookie(self.client, users["self_user"]) + + # Post a comment + url = "gn_synthese.create_report" + synthese = synthese_data["obs1"] + id_synthese = synthese.id_synthese + data = {"item": id_synthese, "content": "comment 4", "type": "discussion"} + resp = self.client.post(url_for(url), data=data) + assert resp.status_code == 204 + + # Filter synthese to at least have this comment + url = url_for("gn_synthese.get_observations_for_web") + filters = {"has_comment": True} + r 
= self.client.get(url, json=filters) + + assert id_synthese in (feature["properties"]["id"] for feature in r.json["features"]) + + def test_get_observations_for_web_filter_id_source(self, users, synthese_data, source): + set_logged_user_cookie(self.client, users["self_user"]) + id_source = source.id_source + + url = url_for("gn_synthese.get_observations_for_web") + filters = {"id_source": id_source} + r = self.client.get(url, json=filters) + + expected_data = { + synthese.id_synthese + for synthese in synthese_data.values() + if synthese.id_source == id_source + } + response_data = {feature["properties"]["id"] for feature in r.json["features"]} + assert expected_data.issubset(response_data) + + @pytest.mark.parametrize( + "observer_input,expected_length_synthese", + [("Vincent", 1), ("Camillé", 2), ("Camille, Elie", 2), ("Jane Doe", 0)], + ) + def test_get_observations_for_web_filter_observers( + self, users, synthese_for_observers, observer_input, expected_length_synthese + ): + set_logged_user_cookie(self.client, users["admin_user"]) + + filters = {"observers": observer_input} + r = self.client.get(url_for("gn_synthese.get_observations_for_web"), json=filters) + for s in r.json["features"]: + print(s) + assert len(r.json["features"]) == expected_length_synthese + def test_get_synthese_data_cruved(self, app, users, synthese_data, datasets): set_logged_user_cookie(self.client, users["self_user"]) @@ -221,15 +358,53 @@ def test_get_synthese_data_cruved(self, app, users, synthese_data, datasets): url_for("gn_synthese.get_observations_for_web"), query_string={"limit": 100} ) data = response.get_json() - features = data["features"] assert len(features) > 0 - assert all( - feat["properties"]["lb_nom"] in [synt.nom_cite for synt in synthese_data] - for feat in features - ) + + for feat in features: + assert feat["properties"]["lb_nom"] in [ + synt.nom_cite for synt in synthese_data.values() + ] assert response.status_code == 200 + def test_get_synthese_data_aggregate(self, users, datasets, synthese_data): + # Test geometry aggregation + set_logged_user_cookie(self.client, users["admin_user"]) + response = self.client.post( + url_for("gn_synthese.get_observations_for_web"), + query_string={ + "format": "grouped_geom", + }, + json={ + "id_dataset": [synthese_data["p1_af1"].id_dataset], + }, + ) + assert response.status_code == 200, response.text + data = response.get_json() + features = data["features"] + # There must be one feature with one obs and one feature with two obs + assert len(features) == 2 + assert Counter([len(f["properties"]["observations"]) for f in features]) == Counter([1, 2]) + + def test_get_synthese_data_aggregate_by_areas(self, users, datasets, synthese_data): + # Test geometry aggregation + set_logged_user_cookie(self.client, users["admin_user"]) + response = self.client.get( + url_for("gn_synthese.get_observations_for_web"), + query_string={ + "format": "grouped_geom_by_areas", + }, + json={ + "id_dataset": [synthese_data["p1_af1"].id_dataset], + }, + ) + assert response.status_code == 200, response.text + data = response.get_json() + features = data["features"] + # There must be one feature with one obs and one feature with two obs + assert len(features) == 2 + assert Counter([len(f["properties"]["observations"]) for f in features]) == Counter([1, 2]) + def test_filter_cor_observers(self, users, synthese_data): """ Test avec un cruved R2 qui join sur cor_synthese_observers @@ -250,7 +425,7 @@ def test_export(self, users): # csv response = self.client.post( 
url_for("gn_synthese.export_observations_web"), - data=[1, 2, 3], + json=[1, 2, 3], query_string={"export_format": "csv"}, ) @@ -258,31 +433,501 @@ def test_export(self, users): response = self.client.post( url_for("gn_synthese.export_observations_web"), - data=[1, 2, 3], + json=[1, 2, 3], query_string={"export_format": "geojson"}, ) assert response.status_code == 200 response = self.client.post( url_for("gn_synthese.export_observations_web"), - data=[1, 2, 3], + json=[1, 2, 3], query_string={"export_format": "shapefile"}, ) assert response.status_code == 200 - def test_export_status(self, users): - set_logged_user_cookie(self.client, users["self_user"]) + def test_export_observations(self, users, synthese_data, synthese_sensitive_data): + data_synthese = synthese_data.values() + data_synthese_sensitive = synthese_sensitive_data.values() + list_id_synthese = [obs_data_synthese.id_synthese for obs_data_synthese in data_synthese] + list_id_synthese.extend( + [obs_data_synthese.id_synthese for obs_data_synthese in data_synthese_sensitive] + ) - response = self.client.post(url_for("gn_synthese.export_status")) + expected_columns_exports = [ + '"id_synthese"', + '"date_debut"', + '"date_fin"', + '"heure_debut"', + '"heure_fin"', + '"cd_nom"', + '"cd_ref"', + '"nom_valide"', + '"nom_vernaculaire"', + '"nom_cite"', + '"regne"', + '"group1_inpn"', + '"group2_inpn"', + '"classe"', + '"ordre"', + '"famille"', + '"rang_taxo"', + '"nombre_min"', + '"nombre_max"', + '"alti_min"', + '"alti_max"', + '"prof_min"', + '"prof_max"', + '"observateurs"', + '"determinateur"', + '"communes"', + '"geometrie_wkt_4326"', + '"x_centroid_4326"', + '"y_centroid_4326"', + '"nom_lieu"', + '"comment_releve"', + '"comment_occurrence"', + '"validateur"', + '"niveau_validation"', + '"date_validation"', + '"comment_validation"', + '"preuve_numerique_url"', + '"preuve_non_numerique"', + '"jdd_nom"', + '"jdd_uuid"', + '"jdd_id"', + '"ca_nom"', + '"ca_uuid"', + '"ca_id"', + '"cd_habref"', + '"cd_habitat"', + '"nom_habitat"', + '"precision_geographique"', + '"nature_objet_geo"', + '"type_regroupement"', + '"methode_regroupement"', + '"technique_observation"', + '"biologique_statut"', + '"etat_biologique"', + '"biogeographique_statut"', + '"naturalite"', + '"preuve_existante"', + '"niveau_precision_diffusion"', + '"stade_vie"', + '"sexe"', + '"objet_denombrement"', + '"type_denombrement"', + '"niveau_sensibilite"', + '"statut_observation"', + '"floutage_dee"', + '"statut_source"', + '"type_info_geo"', + '"methode_determination"', + '"comportement"', + '"reference_biblio"', + '"id_origine"', + '"uuid_perm_sinp"', + '"uuid_perm_grp_sinp"', + '"date_creation"', + '"date_modification"', + '"champs_additionnels"', + ] - assert response.status_code == 200 + def assert_export_results(user, expected_id_synthese_list): + set_logged_user_cookie(self.client, user) + response = self.client.post( + url_for("gn_synthese.export_observations_web"), + json=list_id_synthese, + query_string={"export_format": "csv"}, + ) + assert response.status_code == 200 + + rows_data_response = response.data.decode("utf-8").split("\r\n")[0:-1] + row_header = rows_data_response[0] + rows_synthese_data_response = rows_data_response[1:] + + assert row_header.split(";") == expected_columns_exports + + expected_response_data_synthese = [ + obs_data_synthese + for obs_data_synthese in data_synthese + if obs_data_synthese.id_synthese in expected_id_synthese_list + ] + expected_response_data_synthese.extend( + [ + obs_data_synthese + for obs_data_synthese in 
data_synthese_sensitive + if obs_data_synthese.id_synthese in expected_id_synthese_list + ] + ) + nb_expected_synthese_data = len(expected_response_data_synthese) + assert len(rows_synthese_data_response) >= nb_expected_synthese_data + list_id_synthese_data_response = [ + row.split(";")[0] for row in rows_synthese_data_response + ] + assert set( + f'"{expected_id_synthese}"' for expected_id_synthese in expected_id_synthese_list + ).issubset(set(list_id_synthese_data_response)) + # Some checks on the data of the response : cd_nom, comment_occurrence (comment_description in synthese) + for expected_obs_data_synthese in expected_response_data_synthese: + id_synthese_expected_obs_data_synthese = expected_obs_data_synthese.id_synthese + row_response_obs_data_synthese = [ + row + for row in rows_synthese_data_response + if row.split(";")[0] == f'"{id_synthese_expected_obs_data_synthese}"' + ][0] + # Check cd_nom + expected_cd_nom = expected_obs_data_synthese.cd_nom + index_cd_nom_response = expected_columns_exports.index('"cd_nom"') + response_cd_nom = row_response_obs_data_synthese.split(";")[index_cd_nom_response] + assert response_cd_nom == f'"{expected_cd_nom}"' + # Check comment_occurrence + expected_comment_occurrence = expected_obs_data_synthese.comment_description + index_comment_occurrence_response = expected_columns_exports.index( + '"comment_occurrence"' + ) + response_comment_occurrence = row_response_obs_data_synthese.split(";")[ + index_comment_occurrence_response + ] + assert response_comment_occurrence == f'"{expected_comment_occurrence}"' + + ## "self_user" : scope 1 and include sensitive data + user = users["self_user"] + expected_id_synthese_list = [ + synthese_data[name_obs].id_synthese + for name_obs in [ + "obs1", + "obs2", + "obs3", + "p1_af1", + "p1_af1_2", + "p1_af2", + "p2_af2", + "p2_af1", + "p3_af3", + ] + ] + expected_id_synthese_list.extend( + [ + synthese_sensitive_data[name_obs].id_synthese + for name_obs in [ + "obs_sensitive_protected", + "obs_protected_not_sensitive", + "obs_sensitive_protected_2", + ] + ] + ) + assert_export_results(user, expected_id_synthese_list) + + ## "associate_user_2_exclude_sensitive" : scope 2 and exclude sensitive data + user = users["associate_user_2_exclude_sensitive"] + expected_id_synthese_list = [synthese_data[name_obs].id_synthese for name_obs in ["obs1"]] + expected_id_synthese_list.extend( + [ + synthese_sensitive_data[name_obs].id_synthese + for name_obs in ["obs_protected_not_sensitive"] + ] + ) + assert_export_results(user, expected_id_synthese_list) + + def test_export_taxons(self, users, synthese_data, synthese_sensitive_data): + data_synthese = synthese_data.values() + data_synthese_sensitive = synthese_sensitive_data.values() + list_id_synthese = [obs_data_synthese.id_synthese for obs_data_synthese in data_synthese] + list_id_synthese.extend( + [obs_data_synthese.id_synthese for obs_data_synthese in data_synthese_sensitive] + ) - def test_export_metadata(self, users): - set_logged_user_cookie(self.client, users["self_user"]) + expected_columns_exports = [ + '"nom_valide"', + '"cd_ref"', + '"nom_vern"', + '"group1_inpn"', + '"group2_inpn"', + '"regne"', + '"phylum"', + '"classe"', + '"ordre"', + '"famille"', + '"id_rang"', + '"nb_obs"', + '"date_min"', + '"date_max"', + ] + index_colummn_cd_ref = expected_columns_exports.index('"cd_ref"') + + def assert_export_taxons_results(user, set_expected_cd_ref): + set_logged_user_cookie(self.client, user) + + response = self.client.post( + url_for("gn_synthese.export_taxon_web"), 
+ json=list_id_synthese, + ) + + assert response.status_code == 200 + + rows_data_response = response.data.decode("utf-8").split("\r\n")[0:-1] + row_header = rows_data_response[0] + rows_taxons_data_response = rows_data_response[1:] + + assert row_header.split(";") == expected_columns_exports + + nb_expected_cd_noms = len(set_expected_cd_ref) + + assert len(rows_taxons_data_response) >= nb_expected_cd_noms + + set_cd_ref_data_response = set( + row.split(";")[index_colummn_cd_ref] for row in rows_taxons_data_response + ) + + assert set(f'"{expected_cd_ref}"' for expected_cd_ref in set_expected_cd_ref).issubset( + set_cd_ref_data_response + ) + + ## "self_user" : scope 1 and include sensitive data + user = users["self_user"] + set_expected_cd_ref = set( + Taxref.query.filter(Taxref.cd_nom == synthese_data[name_obs].cd_nom).one().cd_ref + for name_obs in [ + "obs1", + "obs2", + "obs3", + "p1_af1", + "p1_af1_2", + "p1_af2", + "p2_af2", + "p2_af1", + "p3_af3", + ] + ) + set_expected_cd_ref.update( + set( + Taxref.query.filter(Taxref.cd_nom == synthese_sensitive_data[name_obs].cd_nom) + .one() + .cd_ref + for name_obs in [ + "obs_sensitive_protected", + "obs_protected_not_sensitive", + "obs_sensitive_protected_2", + ] + ) + ) + assert_export_taxons_results(user, set_expected_cd_ref) - response = self.client.get(url_for("gn_synthese.export_metadata")) + ## "associate_user_2_exclude_sensitive" : scope 2 and exclude sensitive data + user = users["associate_user_2_exclude_sensitive"] + set_expected_cd_ref = set( + Taxref.query.filter(Taxref.cd_nom == synthese_data[name_obs].cd_nom).one().cd_ref + for name_obs in ["obs1"] + ) + set_expected_cd_ref.add( + Taxref.query.filter( + Taxref.cd_nom == synthese_sensitive_data["obs_protected_not_sensitive"].cd_nom + ) + .one() + .cd_ref + ) + assert_export_taxons_results(user, set_expected_cd_ref) + + def test_export_status(self, users, synthese_data, synthese_sensitive_data): + expected_columns_exports = [ + '"nom_complet"', + '"nom_vern"', + '"cd_nom"', + '"cd_ref"', + '"type_regroupement"', + '"type"', + '"territoire_application"', + '"intitule_doc"', + '"code_statut"', + '"intitule_statut"', + '"remarque"', + '"url_doc"', + ] + index_column_cd_nom = expected_columns_exports.index('"cd_nom"') + + def assert_export_status_results(user, set_expected_cd_ref): + set_logged_user_cookie(self.client, user) + + response = self.client.post( + url_for("gn_synthese.export_status"), + ) + + assert response.status_code == 200 + + rows_data_response = response.data.decode("utf-8").split("\r\n")[0:-1] + row_header = rows_data_response[0] + rows_taxons_data_response = rows_data_response[1:] + + assert row_header.split(";") == expected_columns_exports + + nb_expected_cd_ref = len(set_expected_cd_ref) + set_cd_ref_data_response = set( + row.split(";")[index_column_cd_nom] for row in rows_taxons_data_response + ) + nb_cd_ref_response = len(set_cd_ref_data_response) + + assert nb_cd_ref_response >= nb_expected_cd_ref + + assert set(f'"{expected_cd_ref}"' for expected_cd_ref in set_expected_cd_ref).issubset( + set_cd_ref_data_response + ) + + ## "self_user" : scope 1 and include sensitive data + user = users["self_user"] + set_expected_cd_ref = set( + Taxref.query.filter(Taxref.cd_nom == synthese_sensitive_data[name_obs].cd_nom) + .one() + .cd_ref + for name_obs in [ + "obs_sensitive_protected", + "obs_protected_not_sensitive", + "obs_sensitive_protected_2", + ] + ) + assert_export_status_results(user, set_expected_cd_ref) + + ## "associate_user_2_exclude_sensitive" : scope 2 
+        user = users["associate_user_2_exclude_sensitive"]
+        set_expected_cd_ref = set(
+            Taxref.query.filter(Taxref.cd_nom == synthese_sensitive_data[name_obs].cd_nom)
+            .one()
+            .cd_ref
+            for name_obs in ["obs_protected_not_sensitive"]
+        )
+        assert_export_status_results(user, set_expected_cd_ref)
+
+    def test_export_metadata(self, users, synthese_data, synthese_sensitive_data):
+        data_synthese = synthese_data.values()
+        data_synthese_sensitive = synthese_sensitive_data.values()
+        list_id_synthese = [obs_data_synthese.id_synthese for obs_data_synthese in data_synthese]
+        list_id_synthese.extend(
+            [obs_data_synthese.id_synthese for obs_data_synthese in data_synthese_sensitive]
+        )
-        assert response.status_code == 200
+        expected_columns_exports = [
+            '"jeu_donnees"',
+            '"jdd_id"',
+            '"jdd_uuid"',
+            '"cadre_acquisition"',
+            '"ca_uuid"',
+            '"acteurs"',
+            '"nombre_total_obs"',
+        ]
+        index_column_jdd_id = expected_columns_exports.index('"jdd_id"')
+
+        # TODO: assert that some data is excluded from the response
+        def assert_export_metadata_results(user, dict_expected_datasets):
+            set_logged_user_cookie(self.client, user)
+
+            response = self.client.post(
+                url_for("gn_synthese.export_metadata"),
+            )
+
+            assert response.status_code == 200
+
+            rows_data_response = response.data.decode("utf-8").split("\r\n")[0:-1]
+            row_header = rows_data_response[0]
+            rows_datasets_data_response = rows_data_response[1:]
+
+            assert row_header.split(";") == expected_columns_exports
+
+            nb_expected_datasets = len(dict_expected_datasets)
+            set_id_datasets_data_response = set(
+                row.split(";")[index_column_jdd_id] for row in rows_datasets_data_response
+            )
+            nb_datasets_response = len(set_id_datasets_data_response)
+
+            assert nb_datasets_response >= nb_expected_datasets
+
+            set_expected_id_datasets = set(dict_expected_datasets.keys())
+            assert set(
+                f'"{expected_id_dataset}"' for expected_id_dataset in set_expected_id_datasets
+            ).issubset(set_id_datasets_data_response)
+
+            for expected_id_dataset, expected_nb_obs in dict_expected_datasets.items():
+                row_dataset_data_response = [
+                    row
+                    for row in rows_datasets_data_response
+                    if row.split(";")[index_column_jdd_id] == f'"{expected_id_dataset}"'
+                ][0]
+                nb_obs_response = row_dataset_data_response.split(";")[-1]
+                assert nb_obs_response >= f'"{expected_nb_obs}"'
+
+        ## "self_user" : scope 1 and include sensitive data
+        user = users["self_user"]
+        # Create a dict (id_dataset, nb_obs) for the expected data
+        dict_expected_datasets = {}
+        expected_data_synthese = [
+            obs_synthese
+            for name_obs, obs_synthese in synthese_data.items()
+            if name_obs
+            in [
+                "obs1",
+                "obs2",
+                "obs3",
+                "p1_af1",
+                "p1_af1_2",
+                "p1_af2",
+                "p2_af2",
+                "p2_af1",
+                "p3_af3",
+            ]
+        ]
+        for obs_data_synthese in expected_data_synthese:
+            id_dataset = obs_data_synthese.id_dataset
+            if id_dataset in dict_expected_datasets:
+                dict_expected_datasets[id_dataset] += 1
+            else:
+                dict_expected_datasets[id_dataset] = 1
+        expected_data_synthese = [
+            obs_synthese
+            for name_obs, obs_synthese in synthese_sensitive_data.items()
+            if name_obs
+            in [
+                "obs_sensitive_protected",
+                "obs_protected_not_sensitive",
+                "obs_sensitive_protected_2",
+            ]
+        ]
+        for obs_data_synthese in expected_data_synthese:
+            id_dataset = obs_data_synthese.id_dataset
+            if id_dataset in dict_expected_datasets:
+                dict_expected_datasets[id_dataset] += 1
+            else:
+                dict_expected_datasets[id_dataset] = 1
+        assert_export_metadata_results(user, dict_expected_datasets)
+
+        ## "associate_user_2_exclude_sensitive" : scope 2 and exclude sensitive data
+        user = users["associate_user_2_exclude_sensitive"]
+        # Create a dict (id_dataset, nb_obs) for the expected data
+        dict_expected_datasets = {}
+        expected_data_synthese = [
+            obs_synthese
+            for name_obs, obs_synthese in synthese_data.items()
+            if name_obs in ["obs1"]
+        ]
+        for obs_data_synthese in expected_data_synthese:
+            id_dataset = obs_data_synthese.id_dataset
+            if id_dataset in dict_expected_datasets:
+                dict_expected_datasets[id_dataset] += 1
+            else:
+                dict_expected_datasets[id_dataset] = 1
+        expected_data_synthese = [
+            obs_synthese
+            for name_obs, obs_synthese in synthese_sensitive_data.items()
+            if name_obs
+            in [
+                "obs_protected_not_sensitive",
+            ]
+        ]
+        for obs_data_synthese in expected_data_synthese:
+            id_dataset = obs_data_synthese.id_dataset
+            if id_dataset in dict_expected_datasets:
+                dict_expected_datasets[id_dataset] += 1
+            else:
+                dict_expected_datasets[id_dataset] = 1
+        # TODO: make sure the "associate_2_dataset_sensitive" dataset is not returned, since it
+        # only contains sensitive data, even though the user otherwise has the required scope
+        # (scope 2, and this dataset is associated with them)
+        assert_export_metadata_results(user, dict_expected_datasets)
 
     def test_general_stat(self, users):
         set_logged_user_cookie(self.client, users["self_user"])
 
@@ -293,13 +938,13 @@ def test_general_stat(self, users):
     def test_get_one_synthese_record(self, app, users, synthese_data):
         response = self.client.get(
-            url_for("gn_synthese.get_one_synthese", id_synthese=synthese_data[0].id_synthese)
+            url_for("gn_synthese.get_one_synthese", id_synthese=synthese_data["obs1"].id_synthese)
         )
         assert response.status_code == 401
 
         set_logged_user_cookie(self.client, users["noright_user"])
         response = self.client.get(
-            url_for("gn_synthese.get_one_synthese", id_synthese=synthese_data[0].id_synthese)
+            url_for("gn_synthese.get_one_synthese", id_synthese=synthese_data["obs1"].id_synthese)
        )
         assert response.status_code == 403
 
@@ -312,36 +957,37 @@ def test_get_one_synthese_record(self, app, users, synthese_data):
         set_logged_user_cookie(self.client, users["admin_user"])
         response = self.client.get(
-            url_for("gn_synthese.get_one_synthese", id_synthese=synthese_data[0].id_synthese)
+            url_for("gn_synthese.get_one_synthese", id_synthese=synthese_data["obs1"].id_synthese)
         )
         assert response.status_code == 200
 
         set_logged_user_cookie(self.client, users["self_user"])
         response = self.client.get(
-            url_for("gn_synthese.get_one_synthese", id_synthese=synthese_data[0].id_synthese)
+            url_for("gn_synthese.get_one_synthese", id_synthese=synthese_data["obs1"].id_synthese)
         )
         assert response.status_code == 200
 
         set_logged_user_cookie(self.client, users["user"])
         response = self.client.get(
-            url_for("gn_synthese.get_one_synthese", id_synthese=synthese_data[0].id_synthese)
+            url_for("gn_synthese.get_one_synthese", id_synthese=synthese_data["obs1"].id_synthese)
         )
         assert response.status_code == 200
 
         set_logged_user_cookie(self.client, users["associate_user"])
         response = self.client.get(
-            url_for("gn_synthese.get_one_synthese", id_synthese=synthese_data[0].id_synthese)
+            url_for("gn_synthese.get_one_synthese", id_synthese=synthese_data["obs1"].id_synthese)
        )
         assert response.status_code == 200
 
         set_logged_user_cookie(self.client, users["stranger_user"])
         response = self.client.get(
-            url_for("gn_synthese.get_one_synthese", id_synthese=synthese_data[0].id_synthese)
+            url_for("gn_synthese.get_one_synthese", id_synthese=synthese_data["obs1"].id_synthese)
         )
         assert response.status_code == Forbidden.code
 
-    def test_color_taxon(self, synthese_data):
+    def test_color_taxon(self, synthese_data, users):
         # Note: requires 5×5 grids!
+        set_logged_user_cookie(self.client, users["self_user"])
         response = self.client.get(url_for("gn_synthese.get_color_taxon"))
         assert response.status_code == 200
 
@@ -376,9 +1022,13 @@ def test_color_taxon(self, synthese_data):
             },
         )
 
-    def test_taxa_distribution(self, synthese_data):
-        s = synthese_data[0]
+    def test_taxa_distribution(self, users, synthese_data):
+        s = synthese_data["p1_af1"]
+
+        response = self.client.get(url_for("gn_synthese.get_taxa_distribution"))
+        assert response.status_code == Unauthorized.code
+
+        set_logged_user_cookie(self.client, users["self_user"])
         response = self.client.get(url_for("gn_synthese.get_taxa_distribution"))
         assert response.status_code == 200
         assert len(response.json)
@@ -410,32 +1060,37 @@ def test_taxa_distribution(self, synthese_data):
         assert response.status_code == 200
         assert len(response.json)
 
-    def test_get_taxa_count(self, synthese_data):
+    def test_get_taxa_count(self, synthese_data, users):
+        set_logged_user_cookie(self.client, users["self_user"])
+
         response = self.client.get(url_for("gn_synthese.get_taxa_count"))
 
-        assert response.json >= len(set(synt.cd_nom for synt in synthese_data))
+        assert response.json >= len(set(synt.cd_nom for synt in synthese_data.values()))
 
-    def test_get_taxa_count_id_dataset(self, synthese_data, datasets, unexisted_id):
+    def test_get_taxa_count_id_dataset(self, synthese_data, users, datasets, unexisted_id):
         id_dataset = datasets["own_dataset"].id_dataset
         url = "gn_synthese.get_taxa_count"
+        set_logged_user_cookie(self.client, users["self_user"])
 
         response = self.client.get(url_for(url), query_string={"id_dataset": id_dataset})
         response_empty = self.client.get(url_for(url), query_string={"id_dataset": unexisted_id})
 
-        assert response.json == len(set(synt.cd_nom for synt in synthese_data))
+        assert response.json == len(set(synt.cd_nom for synt in synthese_data.values()))
         assert response_empty.json == 0
 
-    def test_get_observation_count(self, synthese_data):
+    def test_get_observation_count(self, synthese_data, users):
         nb_observations = len(synthese_data)
+        set_logged_user_cookie(self.client, users["admin_user"])
 
         response = self.client.get(url_for("gn_synthese.get_observation_count"))
 
         assert response.json >= nb_observations
 
-    def test_get_observation_count_id_dataset(self, synthese_data, datasets, unexisted_id):
+    def test_get_observation_count_id_dataset(self, synthese_data, users, datasets, unexisted_id):
         id_dataset = datasets["own_dataset"].id_dataset
-        nb_observations = len(synthese_data)
+        nb_observations = len([s for s in synthese_data.values() if s.id_dataset == id_dataset])
         url = "gn_synthese.get_observation_count"
+        set_logged_user_cookie(self.client, users["self_user"])
 
         response = self.client.get(url_for(url), query_string={"id_dataset": id_dataset})
         response_empty = self.client.get(url_for(url), query_string={"id_dataset": unexisted_id})
@@ -443,55 +1098,50 @@ def test_get_observation_count_id_dataset(self, synthese_data, datasets, unexist
         assert response.json == nb_observations
         assert response_empty.json == 0
 
-    def test_get_bbox(self, synthese_data):
-        # In synthese, all entries are located at the same point
-        geom = Point(geometry=to_shape(synthese_data[0].the_geom_4326))
+    def test_get_bbox(self, synthese_data, users):
+        set_logged_user_cookie(self.client, users["self_user"])
 
         response = self.client.get(url_for("gn_synthese.get_bbox"))
 
         assert response.status_code == 200
         assert response.json["type"] in ["Point", "Polygon"]
 
-    def test_get_bbox_id_dataset(self, synthese_data, datasets, unexisted_id):
+    def test_get_bbox_id_dataset(self, synthese_data, users, datasets, unexisted_id):
         id_dataset = datasets["own_dataset"].id_dataset
-        # In synthese, all entries are located at the same point
-        geom = Point(geometry=to_shape(synthese_data[0].the_geom_4326))
         url = "gn_synthese.get_bbox"
+        set_logged_user_cookie(self.client, users["self_user"])
 
         response = self.client.get(url_for(url), query_string={"id_dataset": id_dataset})
 
         assert response.status_code == 200
-        assert response.json["type"] == "Point"
-        assert response.json["coordinates"] == [
-            pytest.approx(coord, 0.9) for coord in [geom.geometry.x, geom.geometry.y]
-        ]
+        assert response.json["type"] == "Polygon"
 
         response_empty = self.client.get(url_for(url), query_string={"id_dataset": unexisted_id})
 
         assert response_empty.status_code == 204
         assert response_empty.get_data(as_text=True) == ""
 
-    def test_get_bbox_id_source(self, bbox_geom, source):
+    def test_get_bbox_id_source(self, synthese_data, users, source):
         id_source = source.id_source
         url = "gn_synthese.get_bbox"
+        set_logged_user_cookie(self.client, users["self_user"])
 
         response = self.client.get(url_for(url), query_string={"id_source": id_source})
 
         assert response.status_code == 200
-        assert response.json["type"] == "Point"
-        assert response.json["coordinates"] == [
-            pytest.approx(coord, 0.9) for coord in [bbox_geom.geometry.x, bbox_geom.geometry.y]
-        ]
+        assert response.json["type"] == "Polygon"
 
-    def test_get_bbox_id_source_empty(self, unexisted_id_source):
+    def test_get_bbox_id_source_empty(self, users, unexisted_id_source):
         url = "gn_synthese.get_bbox"
+        set_logged_user_cookie(self.client, users["self_user"])
 
         response = self.client.get(url_for(url), query_string={"id_source": unexisted_id_source})
 
         assert response.status_code == 204
         assert response.json is None
 
-    def test_observation_count_per_column(self, synthese_data):
+    def test_observation_count_per_column(self, users, synthese_data):
         column_name_dataset = "id_dataset"
         column_name_cd_nom = "cd_nom"
+        set_logged_user_cookie(self.client, users["self_user"])
 
         response_dataset = self.client.get(
             url_for("gn_synthese.observation_count_per_column", column=column_name_dataset)
@@ -506,7 +1156,9 @@ def test_observation_count_per_column(self, synthese_data):
                 "id_dataset": k,
                 "count": len(list(g)),
             }
-            for k, g in itertools.groupby(sorted(synthese_data, key=ds_keyfunc), key=ds_keyfunc)
+            for k, g in itertools.groupby(
+                sorted(synthese_data.values(), key=ds_keyfunc), key=ds_keyfunc
+            )
         ]
 
         cn_keyfunc = lambda s: s.cd_nom
@@ -515,7 +1167,9 @@
                 "cd_nom": k,
                 "count": len(list(g)),
            }
-            for k, g in itertools.groupby(sorted(synthese_data, key=cn_keyfunc), key=cn_keyfunc)
+            for k, g in itertools.groupby(
+                sorted(synthese_data.values(), key=cn_keyfunc), key=cn_keyfunc
+            )
         ]
 
         resp_json = response_dataset.json
@@ -534,8 +1188,10 @@ def test_observation_count_per_column(self, synthese_data):
             if item["cd_nom"] == test_cd_nom["cd_nom"]:
                 assert item["count"] >= test_cd_nom["count"]
 
-    def test_get_autocomplete_taxons_synthese(self, synthese_data):
-        seach_name = synthese_data[0].nom_cite
+    def test_get_autocomplete_taxons_synthese(self, synthese_data, users):
+        seach_name = synthese_data["obs1"].nom_cite
+
+        set_logged_user_cookie(self.client, users["self_user"])
 
         response = self.client.get(
             url_for("gn_synthese.get_autocomplete_taxons_synthese"),
@@ -543,4 +1199,4 @@ def test_get_autocomplete_taxons_synthese(self, synthese_data):
         )
 
         assert response.status_code == 200
-        assert response.json[0]["cd_nom"] == synthese_data[0].cd_nom
+        assert response.json[0]["cd_nom"] == synthese_data["obs1"].cd_nom
diff --git a/backend/geonature/tests/test_synthese_logs.py b/backend/geonature/tests/test_synthese_logs.py
new file mode 100644
index 0000000000..eb0daaae2a
--- /dev/null
+++ b/backend/geonature/tests/test_synthese_logs.py
@@ -0,0 +1,153 @@
+from datetime import datetime, timedelta
+
+import pytest
+
+from flask import url_for
+from sqlalchemy.dialects import postgresql
+from sqlalchemy import and_
+from werkzeug.exceptions import Unauthorized, BadRequest
+from werkzeug.datastructures import MultiDict
+
+from geonature.utils.env import db
+from geonature.core.gn_synthese.models import SyntheseLogEntry
+
+from pypnusershub.tests.utils import set_logged_user_cookie
+
+from .fixtures import *
+
+
+@pytest.fixture()
+def delete_synthese():
+    synthese = Synthese.query.first()
+    with db.session.begin_nested():
+        db.session.delete(synthese)
+    return synthese
+
+
+@pytest.mark.usefixtures("client_class", "temporary_transaction")
+class TestSyntheseLogs:
+    def test_synthese_log_deletion_trigger(self, synthese_data):
+        """
+        Test that deleting a synthese row triggers an insert into t_log_synthese
+        """
+
+        obs = synthese_data["obs1"]
+        assert not db.session.query(
+            SyntheseLogEntry.query.filter_by(id_synthese=obs.id_synthese).exists()
+        ).scalar()
+        with db.session.begin_nested():
+            db.session.delete(obs)
+        assert db.session.query(
+            SyntheseLogEntry.query.filter_by(id_synthese=obs.id_synthese).exists()
+        ).scalar()
+
+    def test_list_synthese_log_entries_unauthenticated(self, users):
+        url = url_for("gn_synthese.list_synthese_log_entries")
+
+        response = self.client.get(url)
+        assert response.status_code == Unauthorized.code
+
+    def test_list_synthese_log_entries(self, users, synthese_data):
+        url = url_for("gn_synthese.list_synthese_log_entries")
+        set_logged_user_cookie(self.client, users["self_user"])
+
+        created_obs = synthese_data["obs1"]
+        updated_obs = synthese_data["obs2"]
+        deleted_obs = synthese_data["obs3"]
+        with db.session.begin_nested():
+            updated_obs.comment_description = "updated"
+            # The update trigger sets meta_update_date to NOW(), but NOW() always
+            # returns the start time of the transaction, so meta_update_date is not
+            # increased. As a workaround, we decrease meta_create_date (not touched
+            # by the trigger) to be sure that meta_create_date < meta_update_date.
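+            # Hypothetical illustration of that PostgreSQL behaviour (assumes
+            # sqlalchemy.text is imported; not part of this test):
+            #     frozen, moving = db.session.execute(
+            #         text("SELECT now(), clock_timestamp()")
+            #     ).first()
+            #     # `frozen` stays at transaction start while `moving` advances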
+            updated_obs.meta_create_date -= timedelta(seconds=1)
+            db.session.delete(deleted_obs)
+
+        response = self.client.get(
+            url,
+            query_string={
+                "meta_last_action_date": "gte:{}".format(datetime.now().isoformat()),
+                "sort": "meta_last_action_date",
+            },
+        )
+        assert response.status_code == 200, response.json
+
+    def test_list_synthese_log_entries_sort(self, users, synthese_data):
+        url = url_for("gn_synthese.list_synthese_log_entries")
+        set_logged_user_cookie(self.client, users["self_user"])
+
+        response = self.client.get(url, query_string={"sort": "invalid"})
+        assert response.status_code == BadRequest.code, response.json
+
+        response = self.client.get(url, query_string={"sort": "meta_last_action_date"})
+        assert response.status_code == 200, response.json
+
+        response = self.client.get(url, query_string={"sort": "meta_last_action_date:asc"})
+        assert response.status_code == 200, response.json
+
+        response = self.client.get(url, query_string={"sort": "meta_last_action_date:desc"})
+        assert response.status_code == 200, response.json
+
+    def test_list_synthese_log_entries_filter_last_action(self, users, synthese_data):
+        url = url_for("gn_synthese.list_synthese_log_entries")
+        set_logged_user_cookie(self.client, users["self_user"])
+
+        created_obs = synthese_data["obs1"]
+        updated_obs = synthese_data["obs2"]
+        deleted_obs = synthese_data["obs3"]
+        with db.session.begin_nested():
+            updated_obs.comment_description = "updated"
+            # see comment above
+            updated_obs.meta_create_date -= timedelta(seconds=1)
+            db.session.delete(deleted_obs)
+
+        response = self.client.get(
+            url,
+            query_string={"id_synthese": created_obs.id_synthese, "last_action": "I"},
+        )
+        assert response.status_code == 200, response.json
+        assert len(response.json["items"]) == 1
+        (obs1,) = response.json["items"]
+        assert obs1["id_synthese"] == created_obs.id_synthese
+        assert obs1["last_action"] == "I"
+
+        response = self.client.get(
+            url,
+            query_string={"id_synthese": created_obs.id_synthese, "last_action": "U"},
+        )
+        assert response.status_code == 200, response.json
+        assert len(response.json["items"]) == 0
+
+        response = self.client.get(
+            url,
+            query_string={"id_synthese": updated_obs.id_synthese, "last_action": "U"},
+        )
+        assert response.status_code == 200, response.json
+        assert len(response.json["items"]) == 1
+        (obs1,) = response.json["items"]
+        assert obs1["id_synthese"] == updated_obs.id_synthese
+        assert obs1["last_action"] == "U"
+
+        response = self.client.get(
+            url,
+            query_string={"id_synthese": updated_obs.id_synthese, "last_action": "I"},
+        )
+        assert response.status_code == 200, response.json
+        assert len(response.json["items"]) == 0
+
+        response = self.client.get(
+            url,
+            query_string={"id_synthese": deleted_obs.id_synthese, "last_action": "D"},
+        )
+        assert response.status_code == 200, response.json
+        assert len(response.json["items"]) == 1
+        (obs1,) = response.json["items"]
+        assert obs1["id_synthese"] == deleted_obs.id_synthese
+        assert obs1["last_action"] == "D"
+
+        response = self.client.get(
+            url,
+            query_string={"id_synthese": deleted_obs.id_synthese, "last_action": "U"},
+        )
+        assert response.status_code == 200, response.json
+        assert len(response.json["items"]) == 0
diff --git a/backend/geonature/tests/test_validation.py b/backend/geonature/tests/test_validation.py
index e154f91e74..afae8f758e 100644
--- a/backend/geonature/tests/test_validation.py
+++ b/backend/geonature/tests/test_validation.py
@@ -39,7 +39,7 @@ def test_add_validation_status(self, users, synthese_data):
         set_logged_user_cookie(self.client, users["user"])
-        synthese = synthese_data[0]
+        synthese = synthese_data["obs1"]
         id_nomenclature_valid_status = TNomenclatures.query.filter(
             sa.and_(
                 TNomenclatures.cd_nomenclature == "1",
@@ -72,7 +72,7 @@ def test_get_validation_history(self, users, synthese_data):
         set_logged_user_cookie(self.client, users["user"])
         response = self.client.get(url_for("gn_commons.get_hist", uuid_attached_row="invalid"))
         assert response.status_code == BadRequest.code
-        s = next(filter(lambda s: s.unique_id_sinp, synthese_data))
+        s = next(filter(lambda s: s.unique_id_sinp, synthese_data.values()))
         response = self.client.get(
             url_for("gn_commons.get_hist", uuid_attached_row=s.unique_id_sinp)
         )
diff --git a/backend/geonature/utils/command.py b/backend/geonature/utils/command.py
index 1564a80876..98b3a15df4 100644
--- a/backend/geonature/utils/command.py
+++ b/backend/geonature/utils/command.py
@@ -6,8 +6,9 @@
 fichiers de routing du frontend etc...). Ces dernières doivent pouvoir fonctionner
 même si un paquet PIP du requirement GeoNature n'a pas été bien installé
 """
+import os
 import json
-import subprocess
+from subprocess import run, DEVNULL
 from contextlib import nullcontext
 
 from jinja2 import Template
@@ -41,12 +42,13 @@ def create_frontend_module_config(module_code, output_file=None):
     """
     Create the frontend config
     """
-    module_code = module_code.upper()
-    module_config = get_module_config(get_dist_from_code(module_code))
+    module_frontend_dir = FRONTEND_DIR / "external_modules" / module_code.lower()
+    # for modules without frontend or with disabled frontend
+    if not module_frontend_dir.exists():
+        return
+    module_config = get_module_config(get_dist_from_code(module_code.upper()))
     if output_file is None:
-        output_file = (
-            FRONTEND_DIR / "external_modules" / module_code.lower() / "app/module.config.ts"
-        ).open("w")
+        output_file = (module_frontend_dir / "app/module.config.ts").open("w")
     else:
         output_file = nullcontext(output_file)
     with output_file as f:
@@ -54,19 +56,21 @@ def create_frontend_module_config(module_code, output_file=None):
         json.dump(module_config, f, indent=True, sort_keys=True)
 
 
+def nvm_available():
+    return run(["/usr/bin/env", "bash", "-i", "-c", "type -t nvm"], stdout=DEVNULL).returncode == 0
+
+
 def install_frontend_dependencies(module_frontend_path):
-    with (FRONTEND_DIR / ".nvmrc").open("r") as f:
-        node_version = f.read().strip()
-    subprocess.run(
-        ["/bin/bash", "-i", "-c", f"nvm exec {node_version} npm ci --omit=dev --omit=peer"],
-        check=True,
-        cwd=module_frontend_path,
-    )
+    cmd = ["npm", "ci", "--omit=dev", "--omit=peer"]
+    if nvm_available():
+        with (FRONTEND_DIR / ".nvmrc").open("r") as f:
+            node_version = f.read().strip()
+        cmd = ["/usr/bin/env", "bash", "-i", "-c", f"nvm exec {node_version} {' '.join(cmd)}"]
+    run(cmd, check=True, cwd=module_frontend_path)
 
 
 def build_frontend():
-    subprocess.run(
-        ["/bin/bash", "-i", "-c", "nvm exec npm run build"],
-        check=True,
-        cwd=str(FRONTEND_DIR),
-    )
+    cmd = ["npm", "run", "build"]
+    if nvm_available():
+        cmd = ["/usr/bin/env", "bash", "-i", "-c", f"nvm exec {' '.join(cmd)}"]
+    run(cmd, check=True, cwd=str(FRONTEND_DIR))
diff --git a/backend/geonature/utils/config_schema.py b/backend/geonature/utils/config_schema.py
index 196dc28680..395bc4e347 100644
--- a/backend/geonature/utils/config_schema.py
+++ b/backend/geonature/utils/config_schema.py
@@ -4,8 +4,6 @@
 
 import os
 
-from pkg_resources import iter_entry_points, load_entry_point
-
 from marshmallow import (
     Schema,
     fields,
@@ -20,8 +18,8 @@
     DEFAULT_LIST_COLUMN,
     DEFAULT_COLUMNS_API_SYNTHESE,
 )
-from geonature.utils.env import GEONATURE_VERSION
-from geonature.utils.module import get_module_config
+from geonature.utils.env import GEONATURE_VERSION, BACKEND_DIR, ROOT_DIR
+from geonature.utils.module import iter_modules_dist, get_module_config
 from geonature.utils.utilsmails import clean_recipients
 from geonature.utils.utilstoml import load_and_validate_toml
@@ -29,14 +27,15 @@
 class EmailStrOrListOfEmailStrField(fields.Field):
     def _deserialize(self, value, attr, data, **kwargs):
         if isinstance(value, str):
-            self._check_email(value)
-            return value
-        elif isinstance(value, list) and all(isinstance(x, str) for x in value):
-            self._check_email(value)
-            return value
-        else:
+            value = list(map(lambda x: x.replace("\n", "").strip(), value.split(",")))
+
+        if not (isinstance(value, list) and all(isinstance(x, str) for x in value)):
             raise ValidationError("Field should be str or list of str")
 
+        self._check_email(value)
+
+        return value
+
     def _check_email(self, value):
@@ -71,10 +70,11 @@ class CasSchemaConf(Schema):
 class MTDSchemaConf(Schema):
     JDD_MODULE_CODE_ASSOCIATION = fields.List(fields.String, load_default=["OCCTAX", "OCCHAB"])
     ID_INSTANCE_FILTER = fields.Integer(load_default=None)
+    SYNC_LOG_LEVEL = fields.String(load_default="INFO")
 
 
 class BddConfig(Schema):
-    ID_USER_SOCLE_1 = fields.Integer(load_default=8)
+    ID_USER_SOCLE_1 = fields.Integer(load_default=7)
     ID_USER_SOCLE_2 = fields.Integer(load_default=6)
 
 
@@ -142,6 +142,14 @@ class AdditionalFields(Schema):
     )
 
 
+class HomeConfig(Schema):
+    TITLE = fields.String(load_default="Bienvenue dans GeoNature")
+    INTRODUCTION = fields.String(
+        load_default="Texte d'introduction, configurable pour le modifier régulièrement ou le masquer"
+    )
+    FOOTER = fields.String(load_default="")
+
+
 class MetadataConfig(Schema):
     NB_AF_DISPLAYED = fields.Integer(load_default=50, validate=OneOf([10, 25, 50, 100]))
     ENABLE_CLOSE_AF = fields.Boolean(load_default=False)
@@ -190,11 +198,10 @@ class GnPySchemaConf(Schema):
     COOKIE_AUTORENEW = fields.Boolean(load_default=True)
     TRAP_ALL_EXCEPTIONS = fields.Boolean(load_default=False)
     SENTRY_DSN = fields.String()
-
-    UPLOAD_FOLDER = fields.String(load_default="static/medias")
-    BASE_DIR = fields.String(
-        load_default=os.path.dirname(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
-    )
+    ROOT_PATH = fields.String(load_default=BACKEND_DIR)
+    STATIC_FOLDER = fields.String(load_default="static")
+    CUSTOM_STATIC_FOLDER = fields.String(load_default=ROOT_DIR / "custom")
+    MEDIA_FOLDER = fields.String(load_default="media")
     CAS = fields.Nested(CasSchemaConf, load_default=CasSchemaConf().load({}))
     MAIL_ON_ERROR = fields.Boolean(load_default=False)
     MAIL_CONFIG = fields.Nested(MailConfig, load_default=MailConfig().load({}))
@@ -210,6 +217,16 @@ class GnPySchemaConf(Schema):
     MEDIAS = fields.Nested(MediasConfig, load_default=MediasConfig().load({}))
     ALEMBIC = fields.Nested(AlembicConfig, load_default=AlembicConfig().load({}))
 
+    @post_load()
+    def folders(self, data, **kwargs):
+        data["STATIC_FOLDER"] = os.path.join(data["ROOT_PATH"], data["STATIC_FOLDER"])
+        if "CUSTOM_STATIC_FOLDER" in data:
+            data["CUSTOM_STATIC_FOLDER"] = os.path.join(
+                data["ROOT_PATH"], data["CUSTOM_STATIC_FOLDER"]
+            )
+        data["MEDIA_FOLDER"] = os.path.join(data["ROOT_PATH"], data["MEDIA_FOLDER"])
+        return data
+
     @post_load()
     def unwrap_usershub(self, data, **kwargs):
         """
@@ -233,7 +250,10 @@ def validate_enable_usershub_and_mail(self, data, **kwargs):
             or data["USERSHUB"].get("ADMIN_APPLICATION_PASSWORD", None) is None
         ):
             raise ValidationError(
-                "URL_USERSHUB, ADMIN_APPLICATION_LOGIN et ADMIN_APPLICATION_PASSWORD sont necessaires si ENABLE_SIGN_UP=True",
+                (
+                    "URL_USERSHUB, ADMIN_APPLICATION_LOGIN et ADMIN_APPLICATION_PASSWORD sont necessaires si ENABLE_SIGN_UP=True "
+                    "ou si ENABLE_USER_MANAGEMENT=True"
+                ),
                 "URL_USERSHUB",
             )
         if data["MAIL_CONFIG"].get("MAIL_SERVER", None) is None:
@@ -326,6 +346,9 @@ class Synthese(Schema):
         ],
     )
 
+    # Filtres par défaut pour la synthese
+    DEFAULT_FILTERS = fields.Dict(load_default={})
+
     # --------------------------------------------------------------------
     # SYNTHESE - OBSERVATIONS LIST
     # Listes des champs renvoyés par l'API synthese '/synthese'
@@ -386,6 +409,26 @@ class Synthese(Schema):
     ALERT_MODULES = fields.List(fields.String(), load_default=["SYNTHESE", "VALIDATION"])
     # Allow to activate pin tool for any, some or all VALIDATION, SYNTHESE
     PIN_MODULES = fields.List(fields.String(), load_default=["SYNTHESE", "VALIDATION"])
+    # Enable areas visualization with toggle slide
+    AREA_AGGREGATION_ENABLED = fields.Boolean(load_default=True)
+    # Choose size of areas
+    AREA_AGGREGATION_TYPE = fields.String(load_default="M10")
+    # Activate areas mode by default
+    AREA_AGGREGATION_BY_DEFAULT = fields.Boolean(load_default=False)
+    # Areas legend classes to use
+    AREA_AGGREGATION_LEGEND_CLASSES = fields.List(
+        fields.Dict(),
+        load_default=[
+            {"min": 100, "color": "#800026"},
+            {"min": 50, "color": "#BD0026"},
+            {"min": 20, "color": "#E31A1C"},
+            {"min": 10, "color": "#FC4E2A"},
+            {"min": 5, "color": "#FD8D3C"},
+            {"min": 2, "color": "#FEB24C"},
+            {"min": 1, "color": "#FED976"},
+            {"min": 0, "color": "#FFEDA0"},
+        ],
+    )
 
 
 # Map configuration
@@ -420,6 +463,7 @@ class MapConfig(Schema):
     CENTER = fields.List(fields.Float, load_default=[46.52863469527167, 2.43896484375])
     ZOOM_LEVEL = fields.Integer(load_default=6)
     ZOOM_LEVEL_RELEVE = fields.Integer(load_default=15)
+    GEOLOCATION = fields.Boolean(load_default=False)
     # zoom appliqué sur la carte lorsque l'on clique sur une liste
     # ne s'applique qu'aux points
     ZOOM_ON_CLICK = fields.Integer(load_default=18)
@@ -477,7 +521,6 @@ class TaxHub(Schema):
 # class a utiliser pour les paramètres que l'on veut passer au frontend
 class GnGeneralSchemaConf(Schema):
     appName = fields.String(load_default="GeoNature2")
-    LOGO_STRUCTURE_FILE = fields.String(load_default="logo_structure.png")
     GEONATURE_VERSION = fields.String(load_default=GEONATURE_VERSION.strip())
     DEFAULT_LANGUAGE = fields.String(load_default="fr")
     PASS_METHOD = fields.String(load_default="hash", validate=OneOf(["hash", "md5"]))
@@ -502,14 +545,18 @@ class GnGeneralSchemaConf(Schema):
         AccountManagement, load_default=AccountManagement().load({})
     )
     MEDIAS = fields.Nested(MediasConfig, load_default=MediasConfig().load({}))
-    UPLOAD_FOLDER = fields.String(load_default="static/medias")
+    STATIC_URL = fields.String(load_default="/static")
+    MEDIA_URL = fields.String(load_default="/media")
     METADATA = fields.Nested(MetadataConfig, load_default=MetadataConfig().load({}))
     MTD = fields.Nested(MTDSchemaConf, load_default=MTDSchemaConf().load({}))
     NB_MAX_DATA_SENSITIVITY_REPORT = fields.Integer(load_default=1000000)
     ADDITIONAL_FIELDS = fields.Nested(AdditionalFields, load_default=AdditionalFields().load({}))
     PUBLIC_ACCESS_USERNAME = fields.String(load_default="")
     TAXHUB = fields.Nested(TaxHub, load_default=TaxHub().load({}))
+    HOME = fields.Nested(HomeConfig, load_default=HomeConfig().load({}))
     NOTIFICATIONS_ENABLED = fields.Boolean(load_default=True)
+    PROFILES_REFRESH_CRONTAB = fields.String(load_default="0 3 * * *")
+    MEDIA_CLEAN_CRONTAB = fields.String(load_default="0 1 * * *")
 
     @validates_schema
     def validate_enable_sign_up(self, data, **kwargs):
@@ -537,9 +584,9 @@ def validate_account_autovalidation(self, data, **kwargs):
 
     @post_load
     def insert_module_config(self, data, **kwargs):
-        for module_code_entry in iter_entry_points("gn_module", "code"):
-            module_code = module_code_entry.resolve()
+        for dist in iter_modules_dist():
+            module_code = dist.entry_points["code"].load()
             if module_code in data["DISABLED_MODULES"]:
                 continue
-            data[module_code] = get_module_config(module_code_entry.dist)
+            data[module_code] = get_module_config(dist)
         return data
diff --git a/backend/geonature/utils/env.py b/backend/geonature/utils/env.py
index a4d08577b4..6ee46064dc 100644
--- a/backend/geonature/utils/env.py
+++ b/backend/geonature/utils/env.py
@@ -1,9 +1,13 @@
 """ Helpers to manipulate the execution environment """
 
 import os
-
+import sys
 from pathlib import Path
-import pkg_resources
+
+if sys.version_info < (3, 9):
+    from importlib_metadata import version, PackageNotFoundError
+else:
+    from importlib.metadata import version, PackageNotFoundError
 
 from flask_sqlalchemy import SQLAlchemy
 from flask_marshmallow import Marshmallow
@@ -21,8 +25,8 @@
 FRONTEND_DIR = ROOT_DIR / "frontend"
 
 try:
-    GEONATURE_VERSION = pkg_resources.get_distribution("geonature").version
-except pkg_resources.DistributionNotFound:
+    GEONATURE_VERSION = version("geonature")
+except PackageNotFoundError:
     with open(str((ROOT_DIR / "VERSION"))) as v:
         GEONATURE_VERSION = v.read()
diff --git a/backend/geonature/utils/filemanager.py b/backend/geonature/utils/filemanager.py
index c86269bf24..ef370286c0 100644
--- a/backend/geonature/utils/filemanager.py
+++ b/backend/geonature/utils/filemanager.py
@@ -1,7 +1,6 @@
 import os
 import unicodedata
 import shutil
-import logging
 import datetime
 import re
 from pathlib import Path
@@ -10,44 +9,6 @@
 from flask import current_app, render_template
 from weasyprint import HTML, CSS
 
-# get the root logger
-log = logging.getLogger()
-
-
-def remove_file(filepath, absolute_path=False):
-    try:
-        if absolute_path:
-            os.remove(filepath)
-        else:
-            os.remove(os.path.join(current_app.config["BASE_DIR"], filepath))
-    except Exception:
-        pass
-
-
-def rename_file(old_chemin, old_title, new_title):
-    new_chemin = old_chemin.replace(
-        removeDisallowedFilenameChars(old_title),
-        removeDisallowedFilenameChars(new_title),
-    )
-    os.rename(
-        os.path.join(current_app.config["BASE_DIR"], old_chemin),
-        os.path.join(current_app.config["BASE_DIR"], new_chemin),
-    )
-    return new_chemin
-
-
-def upload_file(file, id_media, cd_ref, titre):
-
-    filename = ("{cd_ref}_{id_media}_{title}.{ext}").format(
-        cd_ref=str(cd_ref),
-        id_media=str(id_media),
-        title=removeDisallowedFilenameChars(titre),
-        ext=file.filename.rsplit(".", 1)[1],
-    )
-    filepath = os.path.join(current_app.config["UPLOAD_FOLDER"], filename)
-    file.save(os.path.join(current_app.config["BASE_DIR"], filepath))
-    return filepath
-
 
 def removeDisallowedFilenameChars(uncleanString):
     cleanedString = secure_filename(uncleanString)
@@ -69,24 +30,22 @@ def delete_recursively(path_folder, period=1, excluded_files=[]):
 
     for the_file in os.listdir(path_folder):
         file_path = os.path.join(path_folder, the_file)
-        try:
-            now = datetime.datetime.now()
-            creation_date = datetime.datetime.utcfromtimestamp(os.path.getctime(file_path))
-            is_older_than_period = (now - creation_date).days >= period
-            if is_older_than_period:
-                if os.path.isfile(file_path) and not the_file in excluded_files:
-                    os.unlink(file_path)
-                elif os.path.isdir(file_path):
-                    shutil.rmtree(file_path)
-        except Exception as e:
-            log.error(e)
+        now = datetime.datetime.now()
+        creation_date = datetime.datetime.utcfromtimestamp(os.path.getctime(file_path))
+        is_older_than_period = (now - creation_date).days >= period
+        if is_older_than_period:
+            if os.path.isfile(file_path) and not the_file in excluded_files:
+                os.unlink(file_path)
+            elif os.path.isdir(file_path):
+                shutil.rmtree(file_path)
 
 
-def generate_pdf(template, data, filename):
+def generate_pdf(template, data):
+    # Flask renders the template by name with the given context
     template_rendered = render_template(template, data=data)
+    # parse the rendered HTML with WeasyPrint
     html_file = HTML(
         string=template_rendered, base_url=current_app.config["API_ENDPOINT"], encoding="utf-8"
     )
-    file_abs_path = str(Path(current_app.static_folder) / "pdf" / filename)
-    html_file.write_pdf(file_abs_path)
-    return file_abs_path
+    # render the document and return the PDF as bytes
+    return html_file.write_pdf()
diff --git a/backend/geonature/utils/module.py b/backend/geonature/utils/module.py
index ef24d8fffd..5c01382980 100644
--- a/backend/geonature/utils/module.py
+++ b/backend/geonature/utils/module.py
@@ -1,6 +1,11 @@
 import os
 from pathlib import Path
-from pkg_resources import load_entry_point, get_entry_info, iter_entry_points
+import sys
+
+if sys.version_info < (3, 10):
+    from importlib_metadata import entry_points
+else:
+    from importlib.metadata import entry_points
 
 from alembic.script import ScriptDirectory
 from alembic.migration import MigrationContext
@@ -12,36 +17,39 @@
 from geonature.core.gn_commons.models import TModules
 
 
-class NoManifestFound(Exception):
-    pass
+def iter_modules_dist():
+    for module_code_entry in set(entry_points(group="gn_module", name="code")):
+        yield module_code_entry.dist
 
 
 def get_module_config_path(module_code):
     config_path = os.environ.get(f"GEONATURE_{module_code}_CONFIG_FILE")
     if config_path:
         return Path(config_path)
-    dist = get_dist_from_code(module_code)
-    config_path = Path(dist.module_path).parent / "config" / "conf_gn_module.toml"
+    config_path = Path(CONFIG_FILE).parent / f"{module_code.lower()}_config.toml"
     if config_path.exists():
         return config_path
-    config_path = Path(CONFIG_FILE).parent / f"{module_code.lower()}_config.toml"
+    dist = get_dist_from_code(module_code)
+    module_path = Path(sys.modules[dist.entry_points["code"].module].__file__).parent
+    # module_path is commonly backend/gn_module_XXX/ but config dir is at package root
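+    # (illustrative, hypothetical layout: for a module checked out at
+    # /opt/gn_module_foo, module_path would be /opt/gn_module_foo/backend/gn_module_foo,
+    # so the config resolves to /opt/gn_module_foo/config/conf_gn_module.toml)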
+    config_path = module_path.parent.parent / "config" / "conf_gn_module.toml"
     if config_path.exists():
         return config_path
     return None
 
 
 def get_module_config(module_dist):
-    module_code = load_entry_point(module_dist, "gn_module", "code")
-    config_schema = load_entry_point(module_dist, "gn_module", "config_schema")
+    module_code = module_dist.entry_points["code"].load()
+    config_schema = module_dist.entry_points["config_schema"].load()
     config = {"MODULE_CODE": module_code, "MODULE_URL": f"/{module_code.lower()}"}
     config.update(load_and_validate_toml(get_module_config_path(module_code), config_schema))
     return config
 
 
 def get_dist_from_code(module_code):
-    for entry_point in iter_entry_points("gn_module", "code"):
-        if module_code == entry_point.load():
-            return entry_point.dist
+    for dist in iter_modules_dist():
+        if module_code == dist.entry_points["code"].load():
+            return dist
     raise Exception(f"Module with code {module_code} not installed in venv")
@@ -78,11 +86,12 @@ def alembic_branch_in_use(branch_name, directory, x_arg):
 
 def module_db_upgrade(module_dist, directory=None, sql=False, tag=None, x_arg=[]):
-    module_code = module_dist.load_entry_point("gn_module", "code")
-    if "migrations" in module_dist.get_entry_map("gn_module"):
+    module_code = module_dist.entry_points["code"].load()
+    module_blueprint = module_dist.entry_points["blueprint"].load()  # force discovery of models
+    if module_dist.entry_points.select(name="migrations"):
         try:
-            alembic_branch = module_dist.load_entry_point("gn_module", "alembic_branch")
-        except ImportError:
+            alembic_branch = module_dist.entry_points["alembic_branch"].load()
+        except KeyError:
             alembic_branch = module_code.lower()
     else:
         alembic_branch = None
@@ -90,13 +99,17 @@ def module_db_upgrade(module_dist, directory=None, sql=False, tag=None, x_arg=[]
     if module is None:
         # add module to database
         try:
-            module_picto = module_dist.load_entry_point("gn_module", "picto")
-        except ImportError:
+            module_picto = module_dist.entry_points["picto"].load()
+        except KeyError:
             module_picto = "fa-puzzle-piece"
         try:
-            module_type = module_dist.load_entry_point("gn_module", "type")
-        except ImportError:
+            module_type = module_dist.entry_points["type"].load()
+        except KeyError:
             module_type = None
+        try:
+            module_doc_url = module_dist.entry_points["doc_url"].load()
+        except KeyError:
+            module_doc_url = None
         module = TModules(
             type=module_type,
             module_code=module_code,
@@ -104,6 +117,7 @@ def module_db_upgrade(module_dist, directory=None, sql=False, tag=None, x_arg=[]
             module_path=module_code.lower(),
             module_target="_self",
             module_picto=module_picto,
+            module_doc_url=module_doc_url,
             active_frontend=True,
             active_backend=True,
             ng_module=module_code.lower(),
diff --git a/backend/geonature/utils/schema.py b/backend/geonature/utils/schema.py
new file mode 100644
index 0000000000..0d99bd0ae1
--- /dev/null
+++ b/backend/geonature/utils/schema.py
@@ -0,0 +1,22 @@
+from marshmallow import fields
+
+from geonature.core.gn_permissions.tools import get_scopes_by_action
+
+
+class CruvedSchemaMixin:
+    """
+    This mixin adds a cruved field which serializes to a dict "{action: boolean}".
+    example: {"C": False, "R": True, "U": True, "V": False, "E": True, "D": False}
+    The schema must have a __module_code__ property (and optionally a __object_code__ property)
+    to indicate from which module (and object) the permissions must be verified.
+    The model must have a has_instance_permission method which takes the scope and returns a boolean.
+    The cruved field is excluded by default and may be added to serialization with only=["+cruved"].
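+
+    A hypothetical usage sketch (the schema and model names are illustrative only,
+    not part of this codebase):
+
+        class StationSchema(CruvedSchemaMixin, SQLAlchemyAutoSchema):
+            __module_code__ = "OCCHAB"
+
+        StationSchema(only=["+cruved"]).dump(station)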
+    """
+
+    cruved = fields.Method("get_cruved", metadata={"exclude": True})
+
+    def get_cruved(self, obj):
+        module_code = self.__module_code__
+        object_code = getattr(self, "__object_code__", None)
+        scopes = get_scopes_by_action(module_code=module_code, object_code=object_code)
+        return {action: obj.has_instance_permission(scope) for action, scope in scopes.items()}
diff --git a/backend/geonature/utils/sentry.py b/backend/geonature/utils/sentry.py
new file mode 100644
index 0000000000..032d7546b3
--- /dev/null
+++ b/backend/geonature/utils/sentry.py
@@ -0,0 +1,16 @@
+from contextlib import nullcontext
+
+from flask import current_app
+
+
+def start_sentry_child(*args, **kwargs):
+    if not current_app.config.get("SENTRY_DSN"):
+        return nullcontext()
+
+    from sentry_sdk import Hub
+
+    span = Hub.current.scope.span
+    if span is None:
+        return nullcontext()
+
+    return span.start_child(*args, **kwargs)
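+
+
+# Hypothetical usage sketch (the op/description values are illustrative):
+#
+#     with start_sentry_child(op="task", description="synthese export"):
+#         do_work()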
diff --git a/backend/geonature/utils/utilsgeometrytools.py b/backend/geonature/utils/utilsgeometrytools.py
index 2dd9bd59ee..bd2f21770b 100644
--- a/backend/geonature/utils/utilsgeometrytools.py
+++ b/backend/geonature/utils/utilsgeometrytools.py
@@ -39,12 +39,14 @@ def export_as_geo_file(export_format, export_view, db_cols, geojson_col, data, f
     """
     if export_format == "gpkg":
         geo_format = "gpkg"
-        dir_path = str(Path(current_app.static_folder) / "geopackages")
+        dir_path = Path(current_app.config["MEDIA_FOLDER"]) / "geopackages"
         dwn_extension = "gpkg"
     elif export_format == "shapefile":
         geo_format = "shp"
-        dir_path = str(Path(current_app.static_folder) / "shapefiles")
+        dir_path = Path(current_app.config["MEDIA_FOLDER"]) / "shapefiles"
         dwn_extension = "zip"
+    dir_path.mkdir(parents=True, exist_ok=True)
+    dir_path = str(dir_path)
 
     filemanager.delete_recursively(dir_path, excluded_files=[".gitkeep"])
     export_view.as_geofile(
diff --git a/backend/geonature/utils/utilssqlalchemy.py b/backend/geonature/utils/utilssqlalchemy.py
index 88a2b7f1aa..195786a3bd 100644
--- a/backend/geonature/utils/utilssqlalchemy.py
+++ b/backend/geonature/utils/utilssqlalchemy.py
@@ -444,7 +444,7 @@ def serializefn(self, recursif=False, columns=(), relationships=()):
         if recursif is False:
             return out
 
-        for (rel, uselist) in selected_relationship:
+        for rel, uselist in selected_relationship:
             if getattr(self, rel):
                 if uselist is True:
                     out[rel] = [
diff --git a/backend/static/geopackages/.gitkeep b/backend/media/attachments/.gitkeep
similarity index 100%
rename from backend/static/geopackages/.gitkeep
rename to backend/media/attachments/.gitkeep
diff --git a/backend/static/exports/.gitignore b/backend/media/exports/.gitignore
similarity index 100%
rename from backend/static/exports/.gitignore
rename to backend/media/exports/.gitignore
diff --git a/backend/static/medias/.gitkeep b/backend/media/geopackages/.gitkeep
similarity index 100%
rename from backend/static/medias/.gitkeep
rename to backend/media/geopackages/.gitkeep
diff --git a/backend/static/mobile/.gitignore b/backend/media/mobile/.gitignore
similarity index 100%
rename from backend/static/mobile/.gitignore
rename to backend/media/mobile/.gitignore
diff --git a/backend/static/pdf/.gitignore b/backend/media/pdf/.gitignore
similarity index 100%
rename from backend/static/pdf/.gitignore
rename to backend/media/pdf/.gitignore
diff --git a/backend/static/shapefiles/.gitkeep b/backend/media/shapefiles/.gitkeep
similarity index 100%
rename from backend/static/shapefiles/.gitkeep
rename to backend/media/shapefiles/.gitkeep
diff --git a/backend/requirements-common.in b/backend/requirements-common.in
index 85d9816b70..c942f32d0f 100644
--- a/backend/requirements-common.in
+++ b/backend/requirements-common.in
@@ -1,16 +1,19 @@
 celery[redis]
 click>=7.0
-fiona
+fiona>=1.8.22,<1.9
 flask
+flask-admin
 flask-cors
 flask-mail
-flask-marshmallow
+flask-marshmallow<0.15.0
 flask-migrate
 flask-sqlalchemy
 flask-weasyprint
 flask_wtf
 geojson
 gunicorn>=19.8.0
+importlib_metadata==4.13.0;python_version<"3.10"
+importlib_metadata;python_version>"3.10"
 lxml
 marshmallow
 marshmallow-sqlalchemy
diff --git a/backend/requirements-dependencies.in b/backend/requirements-dependencies.in
index 3b3093fe85..2c0a963d4c 100644
--- a/backend/requirements-dependencies.in
+++ b/backend/requirements-dependencies.in
@@ -1,7 +1,7 @@
-pypnusershub>=1.6.4,<2.0.0
-pypnnomenclature>=1.5.3,<2.0.0
-pypn_habref_api>=0.3.1,<1.0.0
-utils-flask-sqlalchemy-geo>=0.2.6,<1.0.0
-utils-flask-sqlalchemy>=0.3.1,<1.0.0
-taxhub>=1.10.7,<2.0.0
-pypn-ref-geo>=1.2.1,<2.0.0
+pypnusershub>=1.6.10,<2
+pypnnomenclature>=1.5.4,<2
+pypn_habref_api>=0.3.2,<1
+utils-flask-sqlalchemy-geo>=0.2.8,<1
+utils-flask-sqlalchemy>=0.3.6,<1
+taxhub>=1.12.1,<2
+pypn-ref-geo>=1.4.0,<2
diff --git a/backend/requirements-dev.txt b/backend/requirements-dev.txt
index 2ba1aca330..0ab217e24f 100644
--- a/backend/requirements-dev.txt
+++ b/backend/requirements-dev.txt
@@ -1,8 +1,8 @@
 #
-# This file is autogenerated by pip-compile with python 3.7
-# To update, run:
+# This file is autogenerated by pip-compile with Python 3.7
+# by the following command:
 #
-#    pip-compile requirements-dev.in
+#    pip-compile --resolver=backtracking requirements-dev.in
 #
 -e file:dependencies/Habref-api-module#egg=pypn_habref_api
     # via -r requirements-submodules.in
@@ -37,42 +37,42 @@
     # via
    #   -r requirements-submodules.in
     #   pypn-ref-geo
-alembic==1.8.1
+alembic==1.12.0
     # via
     #   flask-migrate
     #   pypn-ref-geo
     #   pypnusershub
 amqp==5.1.1
     # via kombu
-async-timeout==4.0.2
+async-timeout==4.0.3
     # via redis
-attrs==22.1.0
+attrs==23.1.0
     # via fiona
-authlib==1.0.1
+authlib==1.2.1
     # via pypnusershub
-bcrypt==4.0.0
+bcrypt==4.0.1
     # via pypnusershub
 billiard==3.6.4.0
     # via celery
-blinker==1.5
+blinker==1.6.2
     # via flask-mail
-boto3==1.24.64
+boto3==1.28.47
     # via taxhub
-botocore==1.27.64
+botocore==1.31.47
     # via
     #   boto3
     #   s3transfer
 cached-property==1.5.2
     # via kombu
-cairocffi==1.3.0
+cairocffi==1.6.1
     # via
     #   cairosvg
     #   weasyprint
-cairosvg==2.5.2
+cairosvg==2.7.1
     # via weasyprint
 celery[redis]==5.2.7
     # via -r requirements-common.in
-certifi==2022.6.15
+certifi==2023.7.22
     # via
     #   fiona
     #   requests
@@ -81,9 +81,9 @@ cffi==1.15.1
     #   cairocffi
     #   cryptography
     #   weasyprint
-charset-normalizer==2.1.1
+charset-normalizer==3.2.0
     # via requests
-click==8.1.3
+click==8.1.7
     # via
     #   -r requirements-common.in
     #   celery
@@ -100,13 +100,13 @@ click-plugins==1.1.1
     # via
     #   celery
     #   fiona
-click-repl==0.2.0
+click-repl==0.3.0
     # via celery
 cligj==0.7.2
     # via fiona
-cryptography==37.0.4
+cryptography==41.0.3
     # via authlib
-cssselect2==0.6.0
+cssselect2==0.7.0
     # via
     #   cairosvg
     #   weasyprint
@@ -114,17 +114,15 @@ decorator==5.1.1
     # via validators
 defusedxml==0.7.1
     # via cairosvg
-deprecated==1.2.13
-    # via redis
-dnspython==2.2.1
+dnspython==2.3.0
     # via email-validator
-email-validator==1.3.0
+email-validator==2.0.0.post2
     # via wtforms-components
-fiona==1.8.21
+fiona==1.8.22
     # via
     #   -r requirements-common.in
     #   utils-flask-sqlalchemy-geo
-flask==2.2.2
+flask==2.2.5
     # via
     #   -r requirements-common.in
     #   flask-admin
@@ -136,14 +134,17 @@ flask==2.2.2
     #   flask-weasyprint
     #   flask-wtf
     #   pypn-habref-api
+    #   pypn-ref-geo
     #   pypnnomenclature
     #   pypnusershub
     #   taxhub
     #   usershub
     #   utils-flask-sqlalchemy
-flask-admin==1.6.0
-    # via pypnnomenclature
-flask-cors==3.0.10
+flask-admin==1.6.1
+    # via
+    #   -r requirements-common.in
+    #   pypnnomenclature
+flask-cors==4.0.0
     # via
     #   -r requirements-common.in
     #   taxhub
@@ -155,7 +156,7 @@ flask-marshmallow==0.14.0
     #   pypn-habref-api
     #   pypnnomenclature
     #   pypnusershub
-flask-migrate==3.1.0
+flask-migrate==4.0.5
     # via
     #   -r requirements-common.in
     #   pypn-habref-api
@@ -176,37 +177,41 @@ flask-sqlalchemy==2.5.1
     #   utils-flask-sqlalchemy
 flask-weasyprint==1.0.0
     # via -r requirements-common.in
-flask-wtf==1.0.1
+flask-wtf==1.1.1
     # via
     #   -r requirements-common.in
     #   usershub
 geoalchemy2==0.11.1
     # via utils-flask-sqlalchemy-geo
-geojson==2.5.0
+geojson==3.0.1
     # via
     #   -r requirements-common.in
     #   utils-flask-sqlalchemy-geo
-gunicorn==20.1.0
+gunicorn==21.2.0
     # via
     #   -r requirements-common.in
     #   taxhub
     #   usershub
 html5lib==1.1
     # via weasyprint
-idna==3.3
+idna==3.4
     # via
     #   email-validator
     #   requests
-importlib-metadata==4.12.0
+importlib-metadata==4.13.0 ; python_version < "3.10"
     # via
+    #   -r requirements-common.in
     #   alembic
+    #   attrs
     #   celery
     #   click
     #   flask
+    #   gunicorn
     #   kombu
     #   mako
+    #   munch
     #   redis
-importlib-resources==5.9.0
+importlib-resources==5.12.0
     # via alembic
 infinity==1.5
     # via intervals
@@ -224,25 +229,28 @@ jmespath==1.0.1
     #   botocore
 kombu==5.2.4
     # via celery
-lxml==4.9.1
+lxml==4.9.3
     # via -r requirements-common.in
-mako==1.2.2
+mako==1.2.4
     # via alembic
-markupsafe==2.1.1
+markupsafe==2.1.3
     # via
     #   jinja2
     #   mako
     #   werkzeug
     #   wtforms
     #   wtforms-components
-marshmallow==3.17.1
+marshmallow==3.19.0
     # via
     #   -r requirements-common.in
     #   flask-marshmallow
+    #   marshmallow-geojson
     #   marshmallow-sqlalchemy
     #   pypn-habref-api
     #   utils-flask-sqlalchemy
-marshmallow-sqlalchemy==0.28.1
+marshmallow-geojson==0.4.0
+    # via utils-flask-sqlalchemy-geo
+marshmallow-sqlalchemy==0.28.2
     # via
     #   -r requirements-common.in
     #   pypn-habref-api
@@ -250,36 +258,36 @@ marshmallow-sqlalchemy==0.28.1
     #   pypnusershub
     #   taxhub
     #   usershub
-munch==2.5.0
+    #   utils-flask-sqlalchemy-geo
+munch==4.0.0
     # via fiona
-packaging==21.3
+packaging==23.1
     # via
     #   -r requirements-common.in
     #   geoalchemy2
+    #   gunicorn
     #   marshmallow
     #   marshmallow-sqlalchemy
-    #   redis
-pillow==9.2.0
+pillow==9.5.0
     # via
     #   -r requirements-common.in
     #   cairosvg
     #   taxhub
     #   weasyprint
-prompt-toolkit==3.0.31
+prompt-toolkit==3.0.39
     # via click-repl
-psycopg2==2.9.3
+psycopg2==2.9.7
     # via
     #   -r requirements-common.in
     #   pypn-habref-api
+    #   pypn-ref-geo
     #   pypnnomenclature
     #   pypnusershub
     #   taxhub
     #   usershub
 pycparser==2.21
     # via cffi
-pyparsing==3.0.9
-    # via packaging
-pyphen==0.13.0
+pyphen==0.14.0
     # via weasyprint
 python-dateutil==2.8.2
     # via
@@ -287,34 +295,33 @@ python-dateutil==2.8.2
     #   botocore
     #   usershub
     #   utils-flask-sqlalchemy
-python-dotenv==0.20.0
+python-dotenv==0.21.1
     # via
     #   pypn-habref-api
+    #   pypn-ref-geo
     #   pypnnomenclature
     #   taxhub
     #   usershub
-pytz==2022.2.1
+pytz==2023.3.post1
     # via celery
-redis==4.3.4
+redis==5.0.0
     # via celery
-requests==2.28.1
+requests==2.31.0
     # via
     #   pypn-habref-api
     #   pypnusershub
-s3transfer==0.6.0
+s3transfer==0.6.2
     # via boto3
-shapely==1.8.4
+shapely==1.8.5.post1
     # via
     #   -r requirements-common.in
+    #   pypnnomenclature
     #   utils-flask-sqlalchemy-geo
 six==1.16.0
     # via
-    #   click-repl
     #   fiona
-    #   flask-cors
     #   flask-marshmallow
     #   html5lib
-    #   munch
     #   python-dateutil
     #   wtforms-components
 sqlalchemy==1.3.24
     # via
@@ -324,24 +331,28 @@ sqlalchemy==1.3.24
     #   flask-sqlalchemy
     #   geoalchemy2
     #   marshmallow-sqlalchemy
+    #   pypn-habref-api
     #   pypn-ref-geo
+    #   pypnnomenclature
     #   pypnusershub
     #   taxhub
+    #   utils-flask-sqlalchemy
     #   utils-flask-sqlalchemy-geo
     #   wtforms-sqlalchemy
-tinycss2==1.1.1
+tinycss2==1.2.1
     # via
     #   cairosvg
     #   cssselect2
     #   weasyprint
 toml==0.10.2
     # via -r requirements-common.in
-typing-extensions==4.3.0
+typing-extensions==4.7.1
     # via
+    #   alembic
     #   async-timeout
     #   importlib-metadata
     #   redis
-urllib3==1.26.12
+urllib3==1.26.16
     # via
     #   botocore
     #   requests
@@ -353,7 +364,7 @@ vine==5.0.0
     #   amqp
     #   celery
     #   kombu
-wcwidth==0.2.5
+wcwidth==0.2.6
     # via prompt-toolkit
 weasyprint==52.5
     # via
@@ -364,10 +375,8 @@ webencodings==0.5.1
     #   cssselect2
     #   html5lib
     #   tinycss2
-werkzeug==2.2.2
+werkzeug==2.2.3
     # via flask
-wrapt==1.14.1
-    # via deprecated
 wtforms==3.0.1
     # via
     #   -r requirements-common.in
@@ -382,7 +391,7 @@ wtforms-sqlalchemy==0.3
     # via -r requirements-common.in
 xmltodict==0.13.0
     # via -r requirements-common.in
-zipp==3.8.1
+zipp==3.15.0
     # via
     #   importlib-metadata
     #   importlib-resources
diff --git a/backend/requirements.txt b/backend/requirements.txt
index b3aac288d8..6e9ae6e13b 100644
--- a/backend/requirements.txt
+++ b/backend/requirements.txt
@@ -2,44 +2,44 @@
 # This file is autogenerated by pip-compile with Python 3.7
 # by the following command:
 #
-#    pip-compile requirements.in
+#    pip-compile --resolver=backtracking requirements.in
 #
-alembic==1.8.1
+alembic==1.12.0
     # via
     #   flask-migrate
     #   pypn-ref-geo
     #   pypnusershub
 amqp==5.1.1
     # via kombu
-async-timeout==4.0.2
+async-timeout==4.0.3
     # via redis
-attrs==22.1.0
+attrs==23.1.0
     # via fiona
-authlib==1.0.1
+authlib==1.2.1
     # via pypnusershub
-bcrypt==4.0.0
+bcrypt==4.0.1
     # via pypnusershub
 billiard==3.6.4.0
     # via celery
-blinker==1.5
+blinker==1.6.2
     # via flask-mail
-boto3==1.24.64
+boto3==1.28.47
     # via taxhub
-botocore==1.27.64
+botocore==1.31.47
     # via
     #   boto3
     #   s3transfer
 cached-property==1.5.2
     # via kombu
-cairocffi==1.3.0
+cairocffi==1.6.1
     # via
     #   cairosvg
     #   weasyprint
-cairosvg==2.5.2
+cairosvg==2.7.1
     # via weasyprint
 celery[redis]==5.2.7
     # via -r requirements-common.in
-certifi==2022.6.15
+certifi==2023.7.22
     # via
     #   fiona
     #   requests
@@ -48,9 +48,9 @@ cffi==1.15.1
     #   cairocffi
     #   cryptography
     #   weasyprint
-charset-normalizer==2.1.1
+charset-normalizer==3.2.0
     # via requests
-click==8.1.3
+click==8.1.7
     # via
     #   -r requirements-common.in
     #   celery
@@ -67,25 +67,23 @@ click-plugins==1.1.1
     # via
     #   celery
     #   fiona
-click-repl==0.2.0
+click-repl==0.3.0
     # via celery
 cligj==0.7.2
     # via fiona
-cryptography==37.0.4
+cryptography==41.0.3
     # via authlib
-cssselect2==0.6.0
+cssselect2==0.7.0
     # via
     #   cairosvg
     #   weasyprint
 defusedxml==0.7.1
     # via cairosvg
-deprecated==1.2.13
-    # via redis
-fiona==1.8.21
+fiona==1.8.22
     # via
     #   -r requirements-common.in
     #   utils-flask-sqlalchemy-geo
-flask==2.2.2
+flask==2.2.5
     # via
     #   -r requirements-common.in
     #   flask-admin
@@ -97,13 +95,16 @@ flask==2.2.2
     #   flask-weasyprint
     #   flask-wtf
     #   pypn-habref-api
+    #   pypn-ref-geo
     #   pypnnomenclature
     #   pypnusershub
     #   taxhub
     #   utils-flask-sqlalchemy
-flask-admin==1.6.0
-    # via pypnnomenclature
-flask-cors==3.0.10
+flask-admin==1.6.1
+    # via
+    #   -r requirements-common.in
+    #   pypnnomenclature
+flask-cors==4.0.0
     # via
     #   -r requirements-common.in
     #   taxhub
@@ -115,7 +116,7 @@ flask-marshmallow==0.14.0
     #   pypn-habref-api
     #   pypnnomenclature
     #   pypnusershub
-flask-migrate==3.1.0
+flask-migrate==4.0.5
     # via
     #   -r requirements-common.in
     #   pypn-habref-api
@@ -134,32 +135,36 @@ flask-sqlalchemy==2.5.1
     #   utils-flask-sqlalchemy
 flask-weasyprint==1.0.0
     # via -r requirements-common.in
-flask-wtf==1.0.1
+flask-wtf==1.1.1
     # via -r requirements-common.in
 geoalchemy2==0.11.1
     # via utils-flask-sqlalchemy-geo
-geojson==2.5.0
+geojson==3.0.1
     # via
     #   -r requirements-common.in
     #   utils-flask-sqlalchemy-geo
-gunicorn==20.1.0
+gunicorn==21.2.0
     # via
     #   -r requirements-common.in
     #   taxhub
 html5lib==1.1
     # via weasyprint
-idna==3.3
+idna==3.4
     # via requests
-importlib-metadata==4.12.0
+importlib-metadata==4.13.0 ; python_version < "3.10"
     # via
+    #   -r requirements-common.in
     #   alembic
+    #   attrs
     #   celery
     #   click
     #   flask
+    #   gunicorn
     #   kombu
     #   mako
+    #   munch
     #   redis
-importlib-resources==5.9.0
+importlib-resources==5.12.0
     # via alembic
 itsdangerous==2.1.2
     # via
@@ -173,69 +178,72 @@ jmespath==1.0.1
     #   botocore
 kombu==5.2.4
     # via celery
-lxml==4.9.1
+lxml==4.9.3
     # via -r requirements-common.in
-mako==1.2.2
+mako==1.2.4
     # via alembic
-markupsafe==2.1.1
+markupsafe==2.1.3
     # via
     #   jinja2
     #   mako
     #   werkzeug
     #   wtforms
-marshmallow==3.17.1
+marshmallow==3.19.0
     # via
     #   -r requirements-common.in
     #   flask-marshmallow
+    #   marshmallow-geojson
     #   marshmallow-sqlalchemy
     #   pypn-habref-api
     #   utils-flask-sqlalchemy
-marshmallow-sqlalchemy==0.28.1
+marshmallow-geojson==0.4.0
+    # via utils-flask-sqlalchemy-geo
+marshmallow-sqlalchemy==0.28.2
     # via
     #   -r requirements-common.in
     #   pypn-habref-api
     #   pypnnomenclature
     #   pypnusershub
     #   taxhub
-munch==2.5.0
+    #   utils-flask-sqlalchemy-geo
+munch==4.0.0
     # via fiona
-packaging==21.3
+packaging==23.1
     # via
     #   -r requirements-common.in
     #   geoalchemy2
+    #   gunicorn
     #   marshmallow
     #   marshmallow-sqlalchemy
-    #   redis
-pillow==9.2.0
+pillow==9.5.0
     # via
     #   -r requirements-common.in
     #   cairosvg
     #   taxhub
     #   weasyprint
-prompt-toolkit==3.0.31
+prompt-toolkit==3.0.39
     # via click-repl
-psycopg2==2.9.3
+psycopg2==2.9.7
     # via
     #   -r requirements-common.in
     #   pypn-habref-api
+    #   pypn-ref-geo
     #   pypnnomenclature
     #   pypnusershub
     #   taxhub
 pycparser==2.21
     # via cffi
-pyparsing==3.0.9
-    # via packaging
-pyphen==0.13.0
+pyphen==0.14.0
     # via weasyprint
-pypn-habref-api==0.3.1
+pypn-habref-api==0.3.2
     # via -r requirements-dependencies.in
-pypn-ref-geo==1.2.1
+pypn-ref-geo==1.4.0
     # via
     #   -r requirements-dependencies.in
     #   taxhub
-pypnnomenclature==1.5.3
+pypnnomenclature==1.5.4
     # via -r requirements-dependencies.in
-pypnusershub==1.6.4
+pypnusershub==1.6.10
     # via
     #   -r requirements-dependencies.in
     #   pypnnomenclature
@@ -245,33 +253,32 @@ python-dateutil==2.8.2
     #   -r requirements-common.in
     #   botocore
     #   utils-flask-sqlalchemy
-python-dotenv==0.20.0
+python-dotenv==0.21.1
     # via
     #   pypn-habref-api
+    #   pypn-ref-geo
     #   pypnnomenclature
     #   taxhub
-pytz==2022.2.1
+pytz==2023.3.post1
     # via celery
-redis==4.3.4
+redis==5.0.0
     # via celery
-requests==2.28.1
+requests==2.31.0
     # via
     #   pypn-habref-api
     #   pypnusershub
-s3transfer==0.6.0
+s3transfer==0.6.2
     # via boto3
-shapely==1.8.4
+shapely==1.8.5.post1
     # via
     #   -r requirements-common.in
+    #   pypnnomenclature
     #   utils-flask-sqlalchemy-geo
 six==1.16.0
     # via
-    #   click-repl
     #   fiona
-    #   flask-cors
     #   flask-marshmallow
     #   html5lib
-    #   munch
     #   python-dateutil
 sqlalchemy==1.3.24
     # via
@@ -280,33 +287,37 @@ sqlalchemy==1.3.24
     #   flask-sqlalchemy
     #   geoalchemy2
     #   marshmallow-sqlalchemy
+    #   pypn-habref-api
     #   pypn-ref-geo
+    #   pypnnomenclature
     #   pypnusershub
     #   taxhub
+    #   utils-flask-sqlalchemy
     #   utils-flask-sqlalchemy-geo
     #   wtforms-sqlalchemy
-taxhub==1.10.7
+taxhub==1.12.1
     # via
     #   -r requirements-dependencies.in
     #   pypnnomenclature
-tinycss2==1.1.1
+tinycss2==1.2.1
     # via
     #   cairosvg
     #   cssselect2
     #   weasyprint
 toml==0.10.2
     # via -r requirements-common.in
-typing-extensions==4.3.0
+typing-extensions==4.7.1
     # via
alembic # async-timeout # importlib-metadata # redis -urllib3==1.26.12 +urllib3==1.26.16 # via # botocore # requests # taxhub -utils-flask-sqlalchemy==0.3.1 +utils-flask-sqlalchemy==0.3.6 # via # -r requirements-dependencies.in # pypn-habref-api @@ -315,7 +326,7 @@ utils-flask-sqlalchemy==0.3.1 # pypnusershub # taxhub # utils-flask-sqlalchemy-geo -utils-flask-sqlalchemy-geo==0.2.6 +utils-flask-sqlalchemy-geo==0.2.8 # via # -r requirements-dependencies.in # pypn-ref-geo @@ -324,7 +335,7 @@ vine==5.0.0 # amqp # celery # kombu -wcwidth==0.2.5 +wcwidth==0.2.6 # via prompt-toolkit weasyprint==52.5 # via @@ -335,10 +346,8 @@ webencodings==0.5.1 # cssselect2 # html5lib # tinycss2 -werkzeug==2.2.2 +werkzeug==2.2.3 # via flask -wrapt==1.14.1 - # via deprecated wtforms==3.0.1 # via # -r requirements-common.in @@ -349,7 +358,7 @@ wtforms-sqlalchemy==0.3 # via -r requirements-common.in xmltodict==0.13.0 # via -r requirements-common.in -zipp==3.8.1 +zipp==3.15.0 # via # importlib-metadata # importlib-resources diff --git a/frontend/src/assets/custom.sample.css b/backend/static/css/frontend.css similarity index 100% rename from frontend/src/assets/custom.sample.css rename to backend/static/css/frontend.css diff --git a/backend/static/css/hide_select2_options.css b/backend/static/css/hide_select2_options.css new file mode 100644 index 0000000000..08f7bea161 --- /dev/null +++ b/backend/static/css/hide_select2_options.css @@ -0,0 +1,3 @@ +.select2-disabled { + display: none !important; +} \ No newline at end of file diff --git a/backend/static/css/metadata_pdf.css b/backend/static/css/metadata_pdf.css index 07cd2c228c..5e4f882eae 100644 --- a/backend/static/css/metadata_pdf.css +++ b/backend/static/css/metadata_pdf.css @@ -161,7 +161,7 @@ div.titre { height:600px; } -.zone, .repartition { +.zone { height: 40%; } @@ -247,26 +247,3 @@ div.titre { font-size: 10px; margin: 2px 0 0 10px; } - -/* DEFAUT */ - -/* FIN DEFAUT */ - - -/* FIN SINP */ - - -/* DEPOBIO */ -/* #ligne-titre-depobio, -#ligne-ca-initial-depobio { - background-color: #07A9E7; -} - -#nombre-taxons-depobio { - background-color: #DDF5FE; -} - -#nombre-donnees-depobio { - background-color: #63D4C1; -} */ -/* FIN DEPOBIO */ diff --git a/backend/static/css/custom.css b/backend/static/css/metadata_pdf_custom.css similarity index 100% rename from backend/static/css/custom.css rename to backend/static/css/metadata_pdf_custom.css diff --git a/frontend/src/favicon.ico b/backend/static/images/favicon.ico similarity index 100% rename from frontend/src/favicon.ico rename to backend/static/images/favicon.ico diff --git a/frontend/src/custom/images/login_background.jpg b/backend/static/images/login_background.jpg similarity index 100% rename from frontend/src/custom/images/login_background.jpg rename to backend/static/images/login_background.jpg diff --git a/frontend/src/custom/images/logo_sidebar.jpg b/backend/static/images/logo_sidebar.jpg similarity index 100% rename from frontend/src/custom/images/logo_sidebar.jpg rename to backend/static/images/logo_sidebar.jpg diff --git a/backend/static/images/logo_structure.jpg b/backend/static/images/logo_structure.jpg deleted file mode 120000 index 3616fb08ac..0000000000 --- a/backend/static/images/logo_structure.jpg +++ /dev/null @@ -1 +0,0 @@ -../../../frontend/src/custom/images/logo_sidebar.jpg \ No newline at end of file diff --git a/frontend/src/custom/images/logo_structure.png b/backend/static/images/logo_structure.png similarity index 100% rename from frontend/src/custom/images/logo_structure.png rename to 
backend/static/images/logo_structure.png
diff --git a/backend/static/js/hide_unnecessary_filters.js b/backend/static/js/hide_unnecessary_filters.js
new file mode 100644
index 0000000000..b090943de2
--- /dev/null
+++ b/backend/static/js/hide_unnecessary_filters.js
@@ -0,0 +1,65 @@
+// Admin form helper: show only the permission filters supported by the
+// selected availability, and disable availabilities already used by the role.
+let roles = [];
+
+$('#availability').on('change', function() {
+    let selected = $(this).find(":selected")[0];
+    // Show the sensitivity checkbox only when the selected availability supports it.
+    if (selected && selected.hasAttribute("sensitivity_filter")) {
+        $("#sensitivity_filter").parent().show();
+    } else {
+        $("#sensitivity_filter").parent().hide();
+    }
+    // Reset the checkbox whenever the availability changes.
+    $('#sensitivity_filter').prop('checked', false);
+
+    // Same logic for the scope filter; also reset its select2 value when hidden.
+    if (selected && selected.hasAttribute("scope_filter")) {
+        $("#scope").parent().show();
+    } else {
+        $("#scope").parent().hide();
+        $("#scope").val("__None").trigger("change");
+    }
+});
+
+$('#availability').trigger('change');
+
+// The role list is fetched through AJAX (select2); keep the last response.
+$(document).ajaxSuccess(function(event, xhr, options){
+    roles = xhr.responseJSON;
+});
+
+let startingRole = $("#role").attr('data-json');
+let startingAvailability = $("#availability :selected").val();
+if (startingRole) {
+    startingRole = JSON.parse(startingRole);
+    hideAvailability(startingRole);
+}
+
+$("#role").on('change', function() {
+    let data = $("#role").select2("data");
+    $("#availability option").prop('disabled', false);
+    if (data) {
+        let role = roles.find(r => r[0] === data.id);
+        hideAvailability(role);
+    }
+});
+
+// Disable the availability options already used by the given role, except the
+// one the permission being edited currently uses.
+function hideAvailability(role) {
+    if (Array.isArray(role[2])) {
+        for (const i in role[2]) {
+            const key = role[2][i];
+            const selectedOption = $('#availability').select2("data");
+            let option = $("#availability option").filter((i, e) => {return $(e).val() === key});
+
+            if (!(startingAvailability === key && role[0] === startingRole[0]))
+                option.prop('disabled', true);
+
+            if (selectedOption && selectedOption.id === key && option.prop('disabled')) {
+                $("#availability").select2("val", "").trigger('change');
+            }
+        }
+    }
+}
diff --git a/config/default_config.toml.example b/config/default_config.toml.example
index 6e80c627b2..988d6bc3ad 100644
--- a/config/default_config.toml.example
+++ b/config/default_config.toml.example
@@ -1,5 +1,5 @@
 ###############################################################################
-# Liste complète des paramètres et de leurs valeurs par défaut
+# Liste (presque) complète des paramètres et de leurs valeurs par défaut
 #
 # UTILISATION : piochez les paramètres que vous souhaitez surcoucher dans ce
 # fichier et intégrez les au fichier 'config/geonature_config.toml'.
@@ -8,9 +8,6 @@
 # doivent être au début du fichier !
 # Les paramètres dans une section doivent être maintenus dans leur section.
 #
-# NOTES : après toute modification dans ce fichier, il est nécessaire de
-# mettre à jour GeoNature à l'aide de la commande :
-# `geonature update_configuration`
 ###############################################################################

 # Base de données
@@ -26,10 +23,7 @@ SQLALCHEMY_TRACK_MODIFICATIONS = true
 # Application

 # Nom de l'application dans la page d'accueil.
-appName = "GeoNature 2"
-
-# Nom du fichier du logo present dans le dossier custom/images
-LOGO_STRUCTURE_FILE = 'logo_structure.png'
+appName = "GeoNature"

 # Langue par défaut utilisée par l'application.
 # Utilisée pour l'instant seulement avec les nomenclatures.
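The hunks above and below rework `default_config.toml.example` as a pure catalogue of defaults: only the keys redefined in `config/geonature_config.toml` shadow these values, section by section. A minimal Python sketch of that overlay pattern, assuming plain `tomllib` and a hypothetical `deep_update` helper (GeoNature's actual config loader is not part of this diff):

```python
import tomllib  # stdlib in Python 3.11+; use the tomli package on older interpreters

def deep_update(base: dict, override: dict) -> dict:
    """Recursively overlay `override` onto `base`, keeping TOML sections intact."""
    for key, value in override.items():
        if isinstance(value, dict) and isinstance(base.get(key), dict):
            deep_update(base[key], value)
        else:
            base[key] = value
    return base

with open("config/default_config.toml.example", "rb") as f:
    config = tomllib.load(f)
with open("config/geonature_config.toml", "rb") as f:
    config = deep_update(config, tomllib.load(f))

print(config["appName"])  # the overridden value if set, the default otherwise
```

This is only an illustration of why a surcharged file may stay minimal: untouched sections keep every default listed here.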
@@ -50,9 +44,6 @@ API_ENDPOINT = "http://url.com/geonature/api"
 # URL de l'API de Taxhub
 API_TAXHUB = "http://127.0.0.1:5000/api/"

-# Identifiant de l'appplication GeoNature (id_application) dans UsersHub
-ID_APPLICATION_GEONATURE = 3
-
 # Type de session
 SESSION_TYPE = "filesystem"
@@ -68,29 +59,22 @@ COOKIE_AUTORENEW = true
 # Capturer toutes les exceptions (=true) ou pas (=false)
 TRAP_ALL_EXCEPTIONS = false

-# Niveau de Log pour l'API. Par défaut ERROR (=40)
-# Cf. https://docs.python.org/3/library/logging.html#logging-levels
-API_LOG_LEVEL = 40
-
 # MTD (pour la connexion au webservice de métadonnées de l'INPN)
 XML_NAMESPACE = "{http://inpn.mnhn.fr/mtd}"
 MTD_API_ENDPOINT = "https://preprod-inpn.mnhn.fr/mtd"

-# Fichiers
-# BASE_DIR = os.path.abspath(os.path.dirname(__file__))
-# Chemin du dossier où stocker les fichiers depuis la racine du dossier 'backend'
-UPLOAD_FOLDER = "static/medias"
-
-# Configuration de l'accès sans authentication
+# Configuration de l'accès sans authentification. Renseigner l’identifiant de l’utilisateur public.
 PUBLIC_ACCESS_USERNAME = ""

 # Active (=true) ou pas (=false) l'envoi d'email pour chaque erreur du backend de GeoNature
 # Attention : si activé, il est nécessaire de remplir la section 'MAIL_CONFIG'.
 MAIL_ON_ERROR = false

-# Notifications
+# Notifications (true / false)
 NOTIFICATIONS_ENABLED = true

+MEDIA_CLEAN_CRONTAB = "0 1 * * *"
+
 [USERSHUB]
 # URL de l'application Usershub
 URL_USERSHUB = "http://127.0.0.1:5001"
@@ -143,7 +127,8 @@ NOTIFICATIONS_ENABLED = true
     # Modules auxquels les JDD sont automatiquement associés
     JDD_MODULE_CODE_ASSOCIATION = ["OCCTAX", "OCCHAB"]
     # Filtrer les JDD par id_instance
-    ID_INSTANCE_FILTER = null
+    # ID_INSTANCE_FILTER = "null"
+    SYNC_LOG_LEVEL = "INFO"

 [BDD]
     id_area_type_municipality = 25
@@ -204,6 +189,9 @@ NOTIFICATIONS_ENABLED = true
     # Zoom appliqué sur la carte lors du clic sur une liste
     ZOOM_ON_CLICK = 16

+    # Activation d'un bouton de géolocalisation sur la carte
+    GEOLOCATION = true
+
     # Restreindre la recherche OpenStreetMap (sur la carte dans l'encart "Rechercher un lieu")
     # à certains pays. Les pays doivent être au format ISO_3166-1 :
     # https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2 et séparés par une virgule.
@@ -248,9 +236,9 @@ NOTIFICATIONS_ENABLED = true
         url = "//{s}.google.com/vt/lyrs=s&x={x}&y={y}&z={z}"
         subdomains = ["mt0", "mt1", "mt2", "mt3"]

-# There are no defaut layers, but this document how to add one:
+# Add base layers on maps (from ref_geo, WMS, WFS, GeoJSON...)
#[[MAPCONFIG.REF_LAYERS]]
-#    code = "COM",
+#    code = "COM"
 #    label = "Communes"
 #    type = "area"
 #    activate = false
@@ -273,7 +261,7 @@ NOTIFICATIONS_ENABLED = true

     # Colonne à afficher par défaut sur la liste des résultats de la synthese
     # Choisir le champ 'prop' parmi les colonnes suivantes :
-    #  id_synthese, date_min, cd_nom, lb_nom, nom_vern_or_lb_nom, st_asgeojson,
+    #  id (=id_synthese), date_min, cd_nom, lb_nom, nom_vern_or_lb_nom,
     #  observers, dataset_name, url_source, count_min_max
     LIST_COLUMNS_FRONTEND = [
         { prop = "nom_vern_or_lb_nom", name = "Taxon" },
@@ -414,7 +402,7 @@ NOTIFICATIONS_ENABLED = true
         "uuid_perm_grp_sinp",
         "date_creation",
         "date_modification",
-        "champs_additionnels
+        "champs_additionnels",
     ]
     DISCUSSION_MODULES = ["SYNTHESE","VALIDATION"]
     DISCUSSION_MAX_LENGTH = 1500
@@ -437,6 +425,31 @@ NOTIFICATIONS_ENABLED = true
         { "id" = "znief", "show" = true, "display_name" = "Espèces déterminantes ZNIEFF", "status_types" = ["ZDET"] },
     ]

+    # Enable areas visualization with a toggle slider
+    AREA_AGGREGATION_ENABLED = true
+    # Choose size of areas
+    AREA_AGGREGATION_TYPE = "M10"
+    # Activate areas mode by default
+    AREA_AGGREGATION_BY_DEFAULT = false
+    # Areas legend classes to use
+    AREA_AGGREGATION_LEGEND_CLASSES = [
+        { min = 100, color = "#800026" },
+        { min = 50, color = "#BD0026" },
+        { min = 20, color = "#E31A1C" },
+        { min = 10, color = "#FC4E2A" },
+        { min = 5, color = "#FD8D3C" },
+        { min = 2, color = "#FEB24C" },
+        { min = 1, color = "#FED976" },
+        { min = 0, color = "#FFEDA0" }
+    ]
+
+    [SYNTHESE.DEFAULT_FILTERS]
+    # Tous les statuts de validation sauf invalide '4'
+    # Il faut bien les renseigner en tant que chaînes de caractères (et non en tant que nombres)
+    cd_nomenclature_valid_status = ['0', '1', '2', '3', '5', '6']
+    # Seulement les données de présence
+    cd_nomenclature_observation_status = ['Pr']
+
 # Gestion des demandes d'inscription
 [ACCOUNT_MANAGEMENT]
     # Activer l'affichage du lien vers le formulaire d'inscription
@@ -464,7 +477,7 @@ NOTIFICATIONS_ENABLED = true
     # Les espaces présents en début et fin de chaine ainsi qu'avant et
     # après les virgules sont ignorés.
     VALIDATOR_EMAIL = """
-    Théo LECHEMIA ,
+    Théo LECHEMIA ,
     Carl von LINNÉ 
     """
@@ -482,7 +495,7 @@ NOTIFICATIONS_ENABLED = true
     [[ACCOUNT_MANAGEMENT.ACCOUNT_FORM]]
         type_widget = "checkbox"
         attribut_label = """
-        
+        J'ai lu et j'accepte la charte
         """
         attribut_name = "validate_charte"
@@ -557,7 +570,7 @@ NOTIFICATIONS_ENABLED = true
     MAIL_CONTENT_AF_CLOSED_URL = ""
     MAIL_CONTENT_AF_CLOSED_GREETINGS = ""
     CLOSED_MODAL_LABEL = "Dépôt d'un cadre d'acquisition"
-    CLOSED_MODAL_CONTENT = L'action de dépôt est une action irréversible. Il ne sera plus possible d'ajouter des jeux de données au cadre d'acquisition par la suite. Vous ne pourrez pas supprimer votre dépôt. "
+    CLOSED_MODAL_CONTENT = "L'action de dépôt est une action irréversible. Il ne sera plus possible d'ajouter des jeux de données au cadre d'acquisition par la suite. Vous ne pourrez pas supprimer votre dépôt. 
" # liste des types de role à afficher sur les formulaires JDD et CA CD_NOMENCLATURE_ROLE_TYPE_DS = ["2"] CD_NOMENCLATURE_ROLE_TYPE_AF = ["3"] @@ -567,3 +580,9 @@ NOTIFICATIONS_ENABLED = true { "label" = "Départements", "type_code"= "DEP" }, { "label" = "Régions", "type_code"= "REG" } ] + +# Page d’accueil +[HOME] + TITLE = "Bienvenue dans GeoNature" + INTRODUCTION = "Texte d'introduction, configurable pour le modifier régulièrement ou le masquer" + FOOTER = "" diff --git a/config/geonature_config.toml.sample b/config/geonature_config.toml.sample index b6af5805ad..16ca60b648 100644 --- a/config/geonature_config.toml.sample +++ b/config/geonature_config.toml.sample @@ -19,6 +19,12 @@ DEFAULT_LANGUAGE='fr' ###### compléter les sections ci-dessous à partir du modèle default_config.toml.example ####################################################################################### +# Configuration liée à la page d’accueil +[HOME] +TITLE = "Bienvenue dans GeoNature" +INTRODUCTION = "Texte d'introduction, configurable pour le modifier régulièrement ou le masquer" +FOOTER = "" + # Configuration liée aux ID de BDD [BDD] diff --git a/config/settings.ini.sample b/config/settings.ini.sample index 0174241c40..6d4fd78f23 100755 --- a/config/settings.ini.sample +++ b/config/settings.ini.sample @@ -74,23 +74,3 @@ install_ref_sensitivity=true install_module_validation=true # Installer le module Occurrence d'habitat install_module_occhab=true - - -# Taxonomy, Users and Nomenclatures configuration -################################################# - -# Les scripts SQL de création des schémas 'utilisateurs', 'taxonomie' et 'nomenclature' de GeoNature sont téléchargés depuis le dépôt github des projets UsersHub et TaxHub -# Les trois paramètres ci-dessous visent à indiquer dans quelle version (release, branche ou tag) les scripts doivent être récupérés - -# Est ce que le schéma utilisateurs doit être installé ? (mettre à 'non' que si vos avez un schéma utilisateur dans une autre BDD et que vous avez déjàmis en place un mecanisme de foreign data wrapper) -install_usershub_schema=true -# Définir dans quelle version de UsersHub (release, branche ou tag) prendre le code SQL permettant la création du schéma utilisateurs de la base de données de GeoNature -usershub_release=2.2.1 - -# Définir dans quelle version de TaxHub (release, branche ou tag) prendre le code SQL permettant la création du schéma taxonomie de la base de données de GeoNature -taxhub_release=1.9.4 - -# Proxy - si le serveur sur lequel se trouve GeoNature se trouve derrière un proxy -# laisser vide si vous n'avez pas de proxy -proxy_http= -proxy_https= diff --git a/config/settings.ini_copy b/config/settings.ini_copy deleted file mode 100755 index 0c536b8dc6..0000000000 --- a/config/settings.ini_copy +++ /dev/null @@ -1,109 +0,0 @@ -## Fichier de configuration utilisé lors de l'installation de GeoNature - -# Langue du serveur -# valeur possible : fr_FR.UTF-8, en_US.utf8 -# locale -a pour voir la liste des locales disponible -my_local=fr_FR.UTF-8 - -# URL Configuration -################### - -# My host URL or IP, starting with http and with / at the end -my_url=http://127.0.0.1/ - - -# PostgreSQL Configuration -########################## - -# Drop eventual existing database during installation -drop_apps_db=true - -# DB host -# Attention les scripts d'installation automatique (install_db.sh et install_all.sh) ne fonctionneront -# que si votre BDD est installée en local (localhost). 
Si vous souhaitez installer votre BDD sur un autre serveur, -# les scripts n'auront pas les droits suffisants pour créer la BDD sur un autre serveur et cela devra être fait manuellement. -db_host=localhost - -# PostgreSQL port -db_port=5432 - -# GeoNature database name -db_name=geonature2db_dev - -# GeoNature database owner username -user_pg=geonatadmin - -# GeoNature database owner password -user_pg_pass=monpassachanger - -# Local projection SRID -srid_local=2154 - -# Default language (locale) -# Availaible language value : fr -default_language=fr - -# Permet l'installation des couches SIG des communes et départements -# Seules les couches de métropole en 2154 sont fournies (false conseillé hors métropole) -install_sig_layers=true - -# Installe les grilles INPN (1, 5 et 10km) -install_grid_layer=true - -# Install default French DEM (Mainland France only - BD alti 250m)) -install_default_dem=false - -# Vectorise dem raster for more performance. This will increse installation duration and take more disk space -vectorise_dem=false - -# Insert sample data (available only in Mainland France with srid_local=2154 ; false otherwise) -add_sample_data=true - -#### Modules GeoNature #### - -# Installer le module validation -install_module_validation=true -# Installer le module Occurrence d'habitat -install_module_occhab=true - - -# Taxonomy, Users and Nomenclatures configuration -################################################# - -# Les scripts SQL de création des schémas 'utilisateurs', 'taxonomie' et 'nomenclature' de GeoNature sont téléchargés depuis le dépôt github des projets UsersHub et TaxHub -# Les trois paramètres ci-dessous visent à indiquer dans quelle version (release, branche ou tag) les scripts doivent être récupérés - -# Est ce que le schéma utilisateurs doit être installé ? 
(mettre à 'non' que si vos avez un schéma utilisateur dans une autre BDD et que vous avez déjàmis en place un mecanisme de foreign data wrapper) -install_usershub_schema=true -# Définir dans quelle version de UsersHub (release, branche ou tag) prendre le code SQL permettant la création du schéma utilisateurs de la base de données de GeoNature -usershub_release=2.1.3 - -# Définir dans quelle version de TaxHub (release, branche ou tag) prendre le code SQL permettant la création du schéma taxonomie de la base de données de GeoNature -taxhub_release=master - -# Définir dans quelle version de Habref-api-module (release, branche ou tag) prendre le code SQL permettant la création du schéma ref_habitats de la base de données de GeoNature -habref_api_release=0.1.4 - -# Définir dans quelle version du sous-module des nomenclatures (release, branche ou tag) prendre le code SQL permettant la création du schéma 'ref_nomenclatures' de la base de données GeoNature -nomenclature_release=1.3.4 -# Proxy - si le serveur sur lequel se trouve GeoNature se trouve derrière un proxy -# laisser vide si vous n'avez pas de proxy -proxy_http= -proxy_https= - - -# Configuration python -########################### -python_path=/usr/bin/python3 - - -##################### -### Gunicorn settings -##################### - -app_name=geonature2 -venv_dir=venv -gun_num_workers=4 -gun_host=0.0.0.0 -gun_port=8000 -gun_timeout=30 diff --git a/config/test_config.toml b/config/test_config.toml index 9030cd6de2..2e3b415af6 100644 --- a/config/test_config.toml +++ b/config/test_config.toml @@ -52,3 +52,6 @@ REF_LAYERS=[ params= {limit= 2000} }, ] + +[SYNTHESE] +AREA_AGGREGATION_TYPE = "M5" \ No newline at end of file diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/blueprint.py b/contrib/gn_module_occhab/backend/gn_module_occhab/blueprint.py index f1a1e7626b..c8a4df64cf 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/blueprint.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/blueprint.py @@ -1,5 +1,7 @@ import datetime import json +import geojson +from marshmallow import EXCLUDE, INCLUDE from flask import ( Blueprint, @@ -8,106 +10,92 @@ send_from_directory, request, render_template, + jsonify, g, ) +from werkzeug.exceptions import BadRequest, Forbidden from geojson import FeatureCollection, Feature from geoalchemy2.shape import from_shape from pypnusershub.db.models import User from shapely.geometry import asShape from sqlalchemy import func, distinct from sqlalchemy.sql import text +from sqlalchemy.orm import raiseload, joinedload from pypnnomenclature.models import TNomenclatures from utils_flask_sqla.response import json_resp, to_csv_resp, to_json_resp from utils_flask_sqla_geo.utilsgeometry import remove_third_dimension +from utils_flask_sqla_geo.utils import geojsonify from utils_flask_sqla_geo.generic import GenericTableGeo +from ref_geo.utils import get_local_srid from geonature.core.gn_permissions import decorators as permissions -from geonature.core.gn_permissions.tools import get_or_fetch_user_cruved -from geonature.core.gn_commons.models import TModules -from geonature.utils.env import DB, ROOT_DIR +from geonature.core.gn_permissions.decorators import login_required +from geonature.core.gn_permissions.tools import get_scopes_by_action +from geonature.core.gn_meta.models import TDatasets as Dataset +from geonature.utils.env import db from geonature.utils.errors import GeonatureApiError from geonature.utils import filemanager from geonature.utils.utilsgeometrytools import 
export_as_geo_file from .models import ( - OneStation, - TStationsOcchab, - THabitatsOcchab, - DefaultNomenclaturesValue, + Station, + OccurenceHabitat, + DefaultNomenclatureValue, ) -from .query import filter_query_with_cruved +from .schemas import StationSchema -blueprint = Blueprint("occhab", __name__) - - -@blueprint.route("/station", methods=["POST"]) -@permissions.check_cruved_scope("C", True, module_code="OCCHAB") -@json_resp -def post_station(info_role): - """ - Post one occhab station (station + habitats) - .. :quickref: OccHab; +blueprint = Blueprint("occhab", __name__) - Post one occhab station (station + habitats) - :returns: GeoJson - """ - data = dict(request.get_json()) - occ_hab = None - properties = data["properties"] - if "t_habitats" in properties: - occ_hab = properties.pop("t_habitats") - observers_list = None - if "observers" in properties: - observers_list = properties.pop("observers") - - station = TStationsOcchab(**properties) - shape = asShape(data["geometry"]) - two_dimension_geom = remove_third_dimension(shape) - station.geom_4326 = from_shape(two_dimension_geom, srid=4326) - if observers_list is not None: - observers = ( - DB.session.query(User) - .filter(User.id_role.in_(list(map(lambda user: user["id_role"], observers_list)))) - .all() +@blueprint.route("/stations/", methods=["GET"]) +@permissions.check_cruved_scope("R", module_code="OCCHAB", get_scope=True) +def list_stations(scope): + stations = ( + Station.query.filter_by_params(request.args) + .filter_by_scope(scope) + .order_by(Station.date_min.desc()) + .options( + raiseload("*"), + joinedload("observers"), + joinedload("dataset"), ) - for o in observers: - station.observers.append(o) - t_hab_list_object = [] - if occ_hab is not None: - for occ in occ_hab: - if occ["id_habitat"] is None: - occ.pop("id_habitat") - data_attr = [k for k in occ] - for att in data_attr: - if not getattr(THabitatsOcchab, att, False): - occ.pop(att) - t_hab_list_object.append(THabitatsOcchab(**occ)) - - # set habitat complexe - station.is_habitat_complex = len(t_hab_list_object) > 1 - - station.t_habitats = t_hab_list_object - if station.id_station: - user_cruved = get_or_fetch_user_cruved( - session=session, id_role=info_role.id_role, module_code="OCCHAB" + ) + only = [ + "observers", + "dataset", + ] + if request.args.get("habitats", default=False, type=int): + only.extend( + [ + "habitats", + "habitats.habref", + ] + ) + stations = stations.options( + joinedload("habitats").options( + joinedload("habref"), + ), + ) + if request.args.get("nomenclatures", default=False, type=int): + only.extend(Station.__nomenclatures__) + stations = stations.options(*[joinedload(nomenc) for nomenc in Station.__nomenclatures__]) + fmt = request.args.get("format", default="geojson") + if fmt not in ("json", "geojson"): + raise BadRequest("Unsupported format") + if fmt == "json": + return jsonify(StationSchema(only=only).dump(stations.all(), many=True)) + elif fmt == "geojson": + return geojsonify( + StationSchema(only=only, as_geojson=True).dump(stations.all(), many=True) ) - # check if allowed to update or raise 403 - station.check_if_allowed(g.current_user, "U", user_cruved["U"]) - DB.session.merge(station) - else: - DB.session.add(station) - DB.session.commit() - return station.get_geofeature() -@blueprint.route("/station/", methods=["GET"]) -@permissions.check_cruved_scope("R", True, module_code="OCCHAB") -@json_resp -def get_one_station(id_station, info_role): +@blueprint.route("/stations//", methods=["GET"]) 
+@permissions.check_cruved_scope("R", module_code="OCCHAB", get_scope=True) +def get_station(id_station, scope): """ Return one station @@ -120,81 +108,96 @@ def get_one_station(id_station, info_role): :rtype dict """ - station = DB.session.query(OneStation).get(id_station) - station_geojson = station.get_geofeature() - user_cruved = get_or_fetch_user_cruved( - session=session, id_role=info_role.id_role, module_code="OCCHAB" - ) - station_geojson["properties"]["rights"] = station.get_model_cruved(info_role, user_cruved) - return station_geojson - - -@blueprint.route("/station/", methods=["DELETE"]) -@permissions.check_cruved_scope("D", True, module_code="OCCHAB") -@json_resp -def delete_one_station(id_station, info_role): + station = Station.query.options( + raiseload("*"), + joinedload("observers"), + joinedload("dataset"), + joinedload("habitats").options( + joinedload("habref"), + *[joinedload(nomenc) for nomenc in OccurenceHabitat.__nomenclatures__], + ), + *[joinedload(nomenc) for nomenc in Station.__nomenclatures__], + ).get_or_404(id_station) + if not station.has_instance_permission(scope): + raise Forbidden("You do not have access to this station.") + only = [ + "observers", + "dataset", + "habitats", + *Station.__nomenclatures__, + *[f"habitats.{nomenclature}" for nomenclature in OccurenceHabitat.__nomenclatures__], + "habitats.habref", + "+cruved", + ] + station_schema = StationSchema(as_geojson=True, only=only) + return geojsonify(station_schema.dump(station)) + + +@blueprint.route("/stations/", methods=["POST"]) +@blueprint.route("/stations//", methods=["POST"]) +@login_required +def create_or_update_station(id_station=None): """ - Delete a station with its habitat and its observers + Post one occhab station (station + habitats) - .. :quickref: Occhab; + .. 
:quickref: OccHab;
+
+    Post one occhab station (station + habitats)
+    :returns: Station as GeoJSON
     """
-    station = DB.session.query(TStationsOcchab).get(id_station)
-    is_allowed = station.user_is_allowed_to(info_role, info_role.value_filter)
-    if is_allowed:
-        DB.session.delete(station)
-        DB.session.commit()
-        return station.get_geofeature()
+    scopes = get_scopes_by_action(module_code="OCCHAB")
+    if id_station is None:
+        action = "C"
     else:
-        return "Forbidden", 403
-
-
-@blueprint.route("/stations", methods=["GET"])
-@permissions.check_cruved_scope("R", True, module_code="OCCHAB")
-@json_resp
-def get_all_habitats(info_role):
+        action = "U"
+    scope = scopes[action]
+    if scope < 1:
+        raise Forbidden(f"You do not have {action} permission on stations.")
+    # Allow habitats
+    # Allow only observers.id_role
+    # Datasets are not accepted as we expect id_dataset on the station directly
+    station_schema = StationSchema(
+        only=["habitats", "observers.id_role"],
+        dump_only=["habitats.id_station"],
+        unknown=EXCLUDE,
+        as_geojson=True,
+    )
+    station = station_schema.load(request.json)
+    if station.id_station != id_station:
+        raise BadRequest("Mismatched id_station.")
+    if id_station and not station.has_instance_permission(scope):
+        raise Forbidden("You do not have access to this station.")
+    dataset = Dataset.query.filter_by(id_dataset=station.id_dataset).one_or_none()
+    if dataset is None:
+        raise BadRequest("Dataset does not exist.")
+    if not dataset.has_instance_permission(scopes["C"]):
+        raise Forbidden("You do not have access to this dataset.")
+    db.session.add(station)
+    db.session.commit()
+    return geojsonify(station_schema.dump(station))
+
+
+@blueprint.route("/stations/<int:id_station>/", methods=["DELETE"])
+@permissions.check_cruved_scope("D", module_code="OCCHAB", get_scope=True)
+def delete_station(id_station, scope):
     """
-    Delete a station with its habitat and its observers
+    Delete a station with its habitat and its observers

     .. 
:quickref: Occhab; """ - params = request.args.to_dict() - q = DB.session.query(TStationsOcchab) - - if "id_dataset" in params: - q = q.filter(TStationsOcchab.id_dataset == params["id_dataset"]) - - if "cd_hab" in params: - q = q.filter(TStationsOcchab.t_habitats.any(cd_hab=params["cd_hab"])) - - if "date_low" in params: - q = q.filter(TStationsOcchab.date_min >= params.pop("date_low")) - - if "date_up" in params: - q = q.filter(TStationsOcchab.date_max <= params.pop("date_up")) - - q = filter_query_with_cruved(TStationsOcchab, q, info_role) - q = q.order_by(TStationsOcchab.date_min.desc()) - limit = request.args.get("limit", None) or blueprint.config["NB_MAX_MAP_LIST"] - data = q.limit(limit) - - user_cruved = get_or_fetch_user_cruved( - session=session, id_role=info_role.id_role, module_code="OCCHAB" - ) - feature_list = [] - for d in data: - feature = d.get_geofeature() - feature["properties"]["rights"] = d.get_model_cruved(info_role, user_cruved) - - feature_list.append(feature) - return FeatureCollection(feature_list) + station = Station.query.get_or_404(id_station) + if not station.has_instance_permission(scope): + raise Forbidden("You do not have access to this station.") + db.session.delete(station) + db.session.commit() + return "", 204 @blueprint.route("/export_stations/", methods=["POST"]) -@permissions.check_cruved_scope("E", True, module_code="OCCHAB") +@permissions.check_cruved_scope("E", module_code="OCCHAB") def export_all_habitats( - info_role, export_format="csv", ): """ @@ -207,13 +210,12 @@ def export_all_habitats( data = request.get_json() - DB.session.execute(func.Find_SRID("gn_synthese", "synthese", "the_geom_local")).scalar() export_view = GenericTableGeo( tableName="v_export_sinp", schemaName="pr_occhab", - engine=DB.engine, + engine=db.engine, geometry_field="geom_local", - srid=srid, + srid=get_local_srid(db.session), ) file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S") @@ -226,7 +228,7 @@ def export_all_habitats( db_cols_for_shape.append(db_col) columns_to_serialize.append(db_col.key) results = ( - DB.session.query(export_view.tableDef) + db.session.query(export_view.tableDef) .filter(export_view.tableDef.columns.id_station.in_(data["idsStation"])) .limit(blueprint.config["NB_MAX_EXPORT"]) ) @@ -246,31 +248,20 @@ def export_all_habitats( FeatureCollection(features), as_file=True, filename=file_name, indent=4 ) else: - try: - dir_name, file_name = export_as_geo_file( - export_format=export_format, - export_view=export_view, - db_cols=db_cols_for_shape, - geojson_col=None, - data=results, - file_name=file_name, - ) - return send_from_directory(dir_name, file_name, as_attachment=True) - - except GeonatureApiError as e: - message = str(e) - - module_url = TModules.query.filter_by(module_code="OCCHAB").one().module_path - return render_template( - "error.html", - error=message, - redirect=current_app.config["URL_APPLICATION"] + "/#/" + module_url, + dir_name, file_name = export_as_geo_file( + export_format=export_format, + export_view=export_view, + db_cols=db_cols_for_shape, + geojson_col=None, + data=results, + file_name=file_name, ) + return send_from_directory(dir_name, file_name, as_attachment=True) @blueprint.route("/defaultNomenclatures", methods=["GET"]) -@json_resp -def getDefaultNomenclatures(): +@login_required +def get_default_nomenclatures(): """Get default nomenclatures define in occhab module .. 
:quickref: Occhab; @@ -284,47 +275,20 @@ def getDefaultNomenclatures(): organism = params["organism"] types = request.args.getlist("mnemonique") - q = DB.session.query( - distinct(DefaultNomenclaturesValue.mnemonique_type), + q = db.session.query( + distinct(DefaultNomenclatureValue.mnemonique_type), func.pr_occhab.get_default_nomenclature_value( - DefaultNomenclaturesValue.mnemonique_type, organism + DefaultNomenclatureValue.mnemonique_type, organism ), ) if len(types) > 0: - q = q.filter(DefaultNomenclaturesValue.mnemonique_type.in_(tuple(types))) + q = q.filter(DefaultNomenclatureValue.mnemonique_type.in_(tuple(types))) data = q.all() formated_dict = {} for d in data: nomenclature_obj = None if d[1]: - nomenclature_obj = DB.session.query(TNomenclatures).get(d[1]).as_dict() + nomenclature_obj = db.session.query(TNomenclatures).get(d[1]).as_dict() formated_dict[d[0]] = nomenclature_obj return formated_dict - - # TODO - # @blueprint.route("/stations/dataset/", methods=["POST", "GET"]) - # @json_resp - # def getStationsDataset(id_dataset): - """ - Get all stations of a dataset - """ - - -# data = dict(request.get_json()) -# sql = text(""" -# SELECT geom_4326 -# FROM pr_occhab.t_stations -# WHERE id_dataset = :id_dataset AND geom_4326 <> ST_MakeEnvelope( -# :xmin, :ymin, :xmax, :ymax -# LIMIT 50 -# ) -# """) -# DB.engine.execute( -# sql, -# id_dataset=id_dataset, -# xmin="", -# ymin="", -# xmax="", -# ymax="", -# ) diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/85efc9bb5a47_declare_permissions.py b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/85efc9bb5a47_declare_permissions.py new file mode 100644 index 0000000000..bb3d430478 --- /dev/null +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/85efc9bb5a47_declare_permissions.py @@ -0,0 +1,96 @@ +"""declare permissions + +Revision ID: 85efc9bb5a47 +Revises: 2984569d5df6 +Create Date: 2023-05-17 14:34:51.804258 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = "85efc9bb5a47" +down_revision = "2984569d5df6" +branch_labels = None +depends_on = ("f051b88a57fd",) + + +def upgrade(): + op.execute( + """ + INSERT INTO + gn_permissions.t_permissions_available ( + id_module, + id_object, + id_action, + label, + scope_filter + ) + SELECT + m.id_module, + o.id_object, + a.id_action, + v.label, + v.scope_filter + FROM + ( + VALUES + ('OCCHAB', 'ALL', 'C', True, 'Créer des habitats') + ,('OCCHAB', 'ALL', 'R', True, 'Voir les habitats') + ,('OCCHAB', 'ALL', 'U', True, 'Modifier les habitats') + ,('OCCHAB', 'ALL', 'E', True, 'Exporter les habitats') + ,('OCCHAB', 'ALL', 'D', True, 'Supprimer des habitats') + ) AS v (module_code, object_code, action_code, scope_filter, label) + JOIN + gn_commons.t_modules m ON m.module_code = v.module_code + JOIN + gn_permissions.t_objects o ON o.code_object = v.object_code + JOIN + gn_permissions.bib_actions a ON a.code_action = v.action_code + """ + ) + op.execute( + """ + WITH bad_permissions AS ( + SELECT + p.id_permission + FROM + gn_permissions.t_permissions p + JOIN gn_commons.t_modules m + USING (id_module) + WHERE + m.module_code = 'OCCHAB' + EXCEPT + SELECT + p.id_permission + FROM + gn_permissions.t_permissions p + JOIN gn_permissions.t_permissions_available pa ON + (p.id_module = pa.id_module + AND p.id_object = pa.id_object + AND p.id_action = pa.id_action) + ) + DELETE + FROM + gn_permissions.t_permissions p + USING bad_permissions bp + WHERE + bp.id_permission = p.id_permission; + """ + ) + + +def downgrade(): + op.execute( + """ + DELETE FROM + gn_permissions.t_permissions_available pa + USING + gn_commons.t_modules m + WHERE + pa.id_module = m.id_module + AND + module_code = 'OCCHAB' + """ + ) diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/models.py b/contrib/gn_module_occhab/backend/gn_module_occhab/models.py index bcea9ea7ef..2481cca224 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/models.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/models.py @@ -1,191 +1,207 @@ -from flask import current_app +from datetime import datetime +from packaging import version + +from flask import current_app, g from geoalchemy2 import Geometry +import sqlalchemy as sa from sqlalchemy import ForeignKey -from sqlalchemy.orm import relationship +from sqlalchemy.orm import relationship, synonym from sqlalchemy.sql import select, func, and_ +from sqlalchemy.schema import UniqueConstraint, FetchedValue from sqlalchemy.dialects.postgresql import UUID +import flask_sqlalchemy + +if version.parse(flask_sqlalchemy.__version__) >= version.parse("3"): + from flask_sqlalchemy.query import Query +else: + from flask_sqlalchemy import BaseQuery as Query from pypnusershub.db.models import User -from pypnnomenclature.models import TNomenclatures +from pypnnomenclature.models import TNomenclatures as Nomenclature +from pypnnomenclature.utils import NomenclaturesMixin from pypn_habref_api.models import Habref from utils_flask_sqla.serializers import serializable from utils_flask_sqla_geo.serializers import geoserializable - -from geonature.core.utils import ModelCruvedAutorization -from geonature.utils.env import DB - - -class CorStationObserverOccHab(DB.Model): - __tablename__ = "cor_station_observer" +from utils_flask_sqla_geo.mixins import GeoFeatureCollectionMixin + +from geonature.utils.env import db +from geonature.core.gn_meta.models import TDatasets as Dataset + + +cor_station_observer = db.Table( + "cor_station_observer", + db.Column("id_cor_station_observer", db.Integer, 
primary_key=True), + db.Column("id_station", db.Integer, ForeignKey("pr_occhab.t_stations.id_station")), + db.Column("id_role", db.Integer, ForeignKey(User.id_role)), + UniqueConstraint("id_station", "id_role"), # , "unique_cor_station_observer"), + schema="pr_occhab", +) + + +class StationQuery(GeoFeatureCollectionMixin, Query): + def filter_by_params(self, params): + qs = self + id_dataset = params.get("id_dataset", type=int) + if id_dataset: + qs = qs.filter_by(id_dataset=id_dataset) + cd_hab = params.get("cd_hab", type=int) + if cd_hab: + qs = qs.filter(Station.habitats.any(OccurenceHabitat.cd_hab == cd_hab)) + date_low = params.get("date_low", type=lambda x: datetime.strptime(x, "%Y-%m-%d")) + if date_low: + qs = qs.filter(Station.date_min >= date_low) + date_up = params.get("date_up", type=lambda x: datetime.strptime(x, "%Y-%m-%d")) + if date_up: + qs = qs.filter(Station.date_max <= date_up) + return qs + + def filter_by_scope(self, scope, user=None): + if user is None: + user = g.current_user + if scope == 0: + self = self.filter(sa.false()) + elif scope in (1, 2): + ds_list = Dataset.query.filter_by_scope(scope).with_entities(Dataset.id_dataset) + self = self.filter( + sa.or_( + Station.observers.any(id_role=user.id_role), + Station.id_dataset.in_([ds.id_dataset for ds in ds_list.all()]), + ) + ) + return self + + habref = db.relationship(Habref, lazy="joined") + + +class Station(NomenclaturesMixin, db.Model): + __tablename__ = "t_stations" __table_args__ = {"schema": "pr_occhab"} - id_cor_station_observer = DB.Column(DB.Integer, primary_key=True) - id_station = DB.Column("id_station", DB.Integer, ForeignKey("pr_occhab.t_stations.id_station")) - id_role = DB.Column("id_role", DB.Integer, ForeignKey("utilisateurs.t_roles.id_role")) + query_class = StationQuery + id_station = db.Column(db.Integer, primary_key=True) + unique_id_sinp_station = db.Column( + UUID(as_uuid=True), default=select([func.uuid_generate_v4()]) + ) + id_dataset = db.Column(db.Integer, ForeignKey(Dataset.id_dataset), nullable=False) + dataset = relationship(Dataset) + date_min = db.Column(db.DateTime, server_default=FetchedValue()) + date_max = db.Column(db.DateTime, server_default=FetchedValue()) + observers_txt = db.Column(db.Unicode(length=500)) + station_name = db.Column(db.Unicode(length=1000)) + is_habitat_complex = db.Column(db.Boolean) + altitude_min = db.Column(db.Integer) + altitude_max = db.Column(db.Integer) + depth_min = db.Column(db.Integer) + depth_max = db.Column(db.Integer) + area = db.Column(db.BigInteger) + comment = db.Column(db.Unicode) + id_digitiser = db.Column(db.Integer) + geom_4326 = db.Column(Geometry("GEOMETRY")) + + habitats = relationship( + "OccurenceHabitat", + lazy="joined", + cascade="all, delete-orphan", + back_populates="station", + ) + t_habitats = synonym(habitats) + observers = db.relationship("User", secondary=cor_station_observer, lazy="joined") -@serializable -class THabitatsOcchab(DB.Model): - __tablename__ = "t_habitats" - __table_args__ = {"schema": "pr_occhab"} - id_habitat = DB.Column(DB.Integer, primary_key=True) - id_station = DB.Column(DB.Integer, ForeignKey("pr_occhab.t_stations.id_station")) - unique_id_sinp_hab = DB.Column(UUID(as_uuid=True), default=select([func.uuid_generate_v4()])) - cd_hab = DB.Column(DB.Integer, ForeignKey(Habref.cd_hab)) - nom_cite = DB.Column(DB.Unicode) - id_nomenclature_determination_type = DB.Column( - DB.Integer, ForeignKey(TNomenclatures.id_nomenclature) + id_nomenclature_exposure = db.Column( + db.Integer, + 
ForeignKey(Nomenclature.id_nomenclature),
+    )
+    nomenclature_exposure = db.relationship(
+        Nomenclature,
+        foreign_keys=[id_nomenclature_exposure],
     )
-    determiner = DB.Column(DB.Unicode)
-    id_nomenclature_collection_technique = DB.Column(
-        DB.Integer, ForeignKey(TNomenclatures.id_nomenclature)
+    id_nomenclature_area_surface_calculation = db.Column(
+        db.Integer,
+        ForeignKey(Nomenclature.id_nomenclature),
+    )
+    nomenclature_area_surface_calculation = db.relationship(
+        Nomenclature,
+        foreign_keys=[id_nomenclature_area_surface_calculation],
+    )
+    id_nomenclature_geographic_object = db.Column(
+        db.Integer,
+        ForeignKey(Nomenclature.id_nomenclature),
+    )
+    nomenclature_geographic_object = db.relationship(
+        Nomenclature,
+        foreign_keys=[id_nomenclature_geographic_object],
     )
-    recovery_percentage = DB.Column(DB.Float)
-    id_nomenclature_abundance = DB.Column(DB.Integer, ForeignKey(TNomenclatures.id_nomenclature))
-    technical_precision = DB.Column(DB.Unicode)
-    id_nomenclature_sensitvity = DB.Column(DB.Integer)
-    habref = DB.relationship(Habref, lazy="joined")
+    def has_instance_permission(self, scope):
+        if scope == 0:
+            return False
+        elif scope in (1, 2):
+            # L’utilisateur est observateur de la station
+            # ou a les droits sur le JDD auquel est rattachée la station.
+            return g.current_user in self.observers or self.dataset.has_instance_permission(scope)
+        elif scope == 3:
+            return True


 @serializable
-@geoserializable
-class TStationsOcchab(ModelCruvedAutorization):
-    __tablename__ = "t_stations"
+class OccurenceHabitat(NomenclaturesMixin, db.Model):
+    __tablename__ = "t_habitats"
     __table_args__ = {"schema": "pr_occhab"}
-    id_station = DB.Column(DB.Integer, primary_key=True)
-    unique_id_sinp_station = DB.Column(
-        UUID(as_uuid=True), default=select([func.uuid_generate_v4()])
-    )
-    id_dataset = DB.Column(DB.Integer, ForeignKey("gn_meta.t_datasets.id_dataset"))
-    date_min = DB.Column(DB.DateTime)
-    date_max = DB.Column(DB.DateTime)
-    observers_txt = DB.Column(DB.Unicode)
-    station_name = DB.Column(DB.Unicode)
-    is_habitat_complex = DB.Column(DB.Boolean)
-    id_nomenclature_exposure = DB.Column(DB.Integer, ForeignKey(TNomenclatures.id_nomenclature))
-    altitude_min = DB.Column(DB.Integer)
-    altitude_max = DB.Column(DB.Integer)
-    depth_min = DB.Column(DB.Integer)
-    depth_max = DB.Column(DB.Integer)
-    area = DB.Column(DB.Float)
-    id_nomenclature_area_surface_calculation = DB.Column(
-        DB.Integer, ForeignKey(TNomenclatures.id_nomenclature)
-    )
-    id_nomenclature_geographic_object = DB.Column(
-        DB.Integer, ForeignKey(TNomenclatures.id_nomenclature)
-    )
-    comment = DB.Column(DB.Unicode)
-    id_digitiser = DB.Column(DB.Integer)
-    geom_4326 = DB.Column(Geometry("GEOMETRY", 4626))
-
-    t_habitats = relationship("THabitatsOcchab", lazy="joined", cascade="all, delete-orphan")
-    dataset = relationship("TDatasets", lazy="joined")
-    observers = DB.relationship(
-        User,
-        lazy="joined",
-        secondary=CorStationObserverOccHab.__table__,
-        primaryjoin=(CorStationObserverOccHab.id_station == id_station),
-        secondaryjoin=(CorStationObserverOccHab.id_role == User.id_role),
-        foreign_keys=[
-            CorStationObserverOccHab.id_station,
-            CorStationObserverOccHab.id_role,
-        ],
-    )
-
-    # overright the constructor
-    # to inherit of ReleModel, the constructor must define some mandatory attribute
-    def __init__(self, *args, **kwargs):
-        super(TStationsOcchab, self).__init__(*args, **kwargs)
-        self.observer_rel = getattr(self, "observers")
-        self.dataset_rel = getattr(self, "dataset")
-        self.id_digitiser_col = getattr(self, 
"id_digitiser") - self.id_dataset_col = getattr(self, "id_dataset") - - def get_geofeature(self): - return self.as_geofeature( - "geom_4326", - "id_station", - fields=[ - "observers", - "t_habitats", - "t_habitats.habref", - "dataset", - ], - ) - -@serializable -class OneHabitat(THabitatsOcchab): - """ - Class which extend THabitatsOcchab with nomenclatures relationships - use for get ONE habitat and station - """ - - determination_method = DB.relationship( - TNomenclatures, - primaryjoin=( - TNomenclatures.id_nomenclature == THabitatsOcchab.id_nomenclature_determination_type - ), + id_habitat = db.Column(db.Integer, primary_key=True) + id_station = db.Column(db.Integer, ForeignKey(Station.id_station), nullable=False) + station = db.relationship(Station, lazy="joined", back_populates="habitats") + unique_id_sinp_hab = db.Column( + UUID(as_uuid=True), + default=select([func.uuid_generate_v4()]), + nullable=False, ) - - collection_technique = DB.relationship( - TNomenclatures, - primaryjoin=( - TNomenclatures.id_nomenclature == THabitatsOcchab.id_nomenclature_collection_technique - ), + cd_hab = db.Column(db.Integer, ForeignKey("ref_habitats.habref.cd_hab"), nullable=False) + habref = db.relationship("Habref", lazy="joined") + nom_cite = db.Column(db.Unicode, nullable=False) + determiner = db.Column(db.Unicode) + recovery_percentage = db.Column(db.Float) + technical_precision = db.Column(db.Unicode) + + id_nomenclature_determination_type = db.Column( + db.Integer, ForeignKey(Nomenclature.id_nomenclature) ) - abundance = DB.relationship( - TNomenclatures, - primaryjoin=(TNomenclatures.id_nomenclature == THabitatsOcchab.id_nomenclature_abundance), + nomenclature_determination_type = db.relationship( + Nomenclature, + foreign_keys=[id_nomenclature_determination_type], + ) + id_nomenclature_collection_technique = db.Column( + db.Integer, + ForeignKey(Nomenclature.id_nomenclature), + nullable=False, + ) + nomenclature_collection_technique = db.relationship( + Nomenclature, + foreign_keys=[id_nomenclature_collection_technique], + ) + id_nomenclature_abundance = db.Column( + db.Integer, + ForeignKey(Nomenclature.id_nomenclature), + ) + nomenclature_abundance = db.relationship( + Nomenclature, + foreign_keys=[id_nomenclature_abundance], + ) + id_nomenclature_sensitivity = db.Column( + "id_nomenclature_sensitvity", # TODO fix db column typo + db.Integer, + ForeignKey(Nomenclature.id_nomenclature), + ) + nomenclature_sensitivity = db.relationship( + Nomenclature, + foreign_keys=[id_nomenclature_sensitivity], ) @serializable -@geoserializable -class OneStation(TStationsOcchab): - exposure = DB.relationship( - TNomenclatures, - primaryjoin=(TNomenclatures.id_nomenclature == TStationsOcchab.id_nomenclature_exposure), - ) - area_surface_calculation = DB.relationship( - TNomenclatures, - primaryjoin=( - TNomenclatures.id_nomenclature - == TStationsOcchab.id_nomenclature_area_surface_calculation - ), - ) - geographic_object = DB.relationship( - TNomenclatures, - primaryjoin=( - TNomenclatures.id_nomenclature == TStationsOcchab.id_nomenclature_geographic_object - ), - ) - - t_one_habitats = relationship("OneHabitat", lazy="joined") - - def get_geofeature(self): - return self.as_geofeature( - "geom_4326", - "id_station", - fields=[ - "observers", - "t_one_habitats", - "exposure", - "dataset", - "area_surface_calculation", - "geographic_object", - "t_one_habitats.determination_method", - "t_one_habitats.collection_technique", - "t_one_habitats.abundance", - "t_habitats.habref", - ], - ) - - -@serializable 
-class DefaultNomenclaturesValue(DB.Model): +class DefaultNomenclatureValue(db.Model): __tablename__ = "defaults_nomenclatures_value" __table_args__ = {"schema": "pr_occhab"} - mnemonique_type = DB.Column(DB.Integer, primary_key=True) - id_organism = DB.Column(DB.Integer, primary_key=True) - id_nomenclature = DB.Column(DB.Integer, primary_key=True) + mnemonique_type = db.Column(db.Integer, primary_key=True) + id_organism = db.Column(db.Integer, primary_key=True) + id_nomenclature = db.Column(db.Integer, primary_key=True) diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/query.py b/contrib/gn_module_occhab/backend/gn_module_occhab/query.py deleted file mode 100644 index 224f986196..0000000000 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/query.py +++ /dev/null @@ -1,71 +0,0 @@ -from sqlalchemy import literal, or_ -from sqlalchemy.sql import exists - -from geonature.core.gn_meta.models import TDatasets -from geonature.utils.env import DB -from geonature.utils.errors import GeonatureApiError - -from .models import CorStationObserverOccHab - - -def filter_query_with_cruved( - model, - q, - user, - id_station_col="id_station", - id_dataset_column="id_dataset", - observers_column="observers_txt", - id_digitiser_column="id_digitiser", - filter_on_obs_txt=True, - with_generic_table=False, -): - """ - Filter the query with the cruved authorization of a user - - Returns: - - A SQLA Query object - """ - # if with geniric table , the column are located in model.columns, else in model - if with_generic_table: - model_temp = model.columns - else: - model_temp = model - # get the mandatory column - try: - model_id_station_col = getattr(model_temp, id_station_col) - model_id_dataset_column = getattr(model_temp, id_dataset_column) - model_observers_column = getattr(model_temp, observers_column) - model_id_digitiser_column = getattr(model_temp, id_digitiser_column) - except AttributeError as e: - raise GeonatureApiError( - """the {model} table does not have a column {e} - If you change the {model} table, please edit your synthese config (cf EXPORT_***_COL) - """.format( - e=e, model=model - ) - ) - if user.value_filter in ("1", "2"): - - sub_query_id_role = ( - DB.session.query(CorStationObserverOccHab) - .filter(CorStationObserverOccHab.id_role == user.id_role) - .exists() - ) - ors_filters = [ - sub_query_id_role, - model_id_digitiser_column == user.id_role, - ] - if filter_on_obs_txt: - user_fullname1 = user.nom_role + " " + user.prenom_role + "%" - user_fullname2 = user.prenom_role + " " + user.nom_role + "%" - ors_filters.append(model_observers_column.ilike(user_fullname1)) - ors_filters.append(model_observers_column.ilike(user_fullname2)) - if user.value_filter == "1": - allowed_datasets = TDatasets.query.filter_by_scope(1).exists() - ors_filters.append(allowed_datasets) - q = q.filter(or_(*ors_filters)) - elif user.value_filter == "2": - allowed_datasets = TDatasets.query.filter_by_scope(2).exists() - ors_filters.append(allowed_datasets) - q = q.filter(or_(*ors_filters)) - return q diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/schemas.py b/contrib/gn_module_occhab/backend/gn_module_occhab/schemas.py new file mode 100644 index 0000000000..1dfa4f2f46 --- /dev/null +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/schemas.py @@ -0,0 +1,70 @@ +from marshmallow import fields, validates_schema, EXCLUDE +from marshmallow.decorators import post_dump +from marshmallow.exceptions import ValidationError +from marshmallow_sqlalchemy import auto_field +from 
marshmallow_sqlalchemy.fields import Nested
+
+from geonature.utils.env import db, ma
+from geonature.utils.schema import CruvedSchemaMixin
+from geonature.core.gn_meta.schemas import DatasetSchema
+from geonature.core.gn_permissions.tools import get_scopes_by_action
+
+from pypnusershub.schemas import UserSchema
+from pypnnomenclature.utils import NomenclaturesConverter
+from pypn_habref_api.schemas import HabrefSchema
+from utils_flask_sqla.schema import SmartRelationshipsMixin
+from utils_flask_sqla_geo.schema import GeoAlchemyAutoSchema, GeoModelConverter
+
+from gn_module_occhab.models import Station, OccurenceHabitat
+
+
+class StationConverter(NomenclaturesConverter, GeoModelConverter):
+    pass
+
+
+class StationSchema(CruvedSchemaMixin, SmartRelationshipsMixin, GeoAlchemyAutoSchema):
+    class Meta:
+        model = Station
+        include_fk = True
+        load_instance = True
+        sqla_session = db.session
+        feature_id = "id_station"
+        model_converter = StationConverter
+
+    __module_code__ = "OCCHAB"
+
+    id_station = auto_field(allow_none=True)
+
+    date_min = fields.DateTime("%Y-%m-%d")
+    date_max = fields.DateTime("%Y-%m-%d")
+
+    habitats = Nested("OccurenceHabitatSchema", unknown=EXCLUDE, many=True)
+    observers = Nested(UserSchema, unknown=EXCLUDE, many=True)
+    dataset = Nested(DatasetSchema, dump_only=True)
+
+    @validates_schema
+    def validate_habitats(self, data, **kwargs):
+        """
+        Ensure this schema is not leveraged to retrieve habitats from another station
+        """
+        for hab in data["habitats"]:
+            if hab.id_station is not None and data.get("id_station") != hab.id_station:
+                raise ValidationError(
+                    "Habitat does not belong to this station.", field_name="habitats"
+                )
+
+
+class OccurenceHabitatSchema(SmartRelationshipsMixin, ma.SQLAlchemyAutoSchema):
+    class Meta:
+        model = OccurenceHabitat
+        include_fk = True
+        load_instance = True
+        sqla_session = db.session
+        model_converter = NomenclaturesConverter
+
+    id_habitat = auto_field(allow_none=True)
+    id_station = auto_field(allow_none=True)
+    unique_id_sinp_hab = auto_field(allow_none=True)
+
+    station = Nested(StationSchema)
+    habref = Nested(HabrefSchema, dump_only=True)
diff --git a/contrib/gn_module_occhab/config/settings.ini.sample b/contrib/gn_module_occhab/config/settings.ini.sample
deleted file mode 100644
index ebc5c34a9b..0000000000
--- a/contrib/gn_module_occhab/config/settings.ini.sample
+++ /dev/null
@@ -1 +0,0 @@
-# Fichier de paramètre pour l'installation des scripts de BDD
diff --git a/contrib/gn_module_occhab/frontend/app/components/delete-modal/delete-modal.component.ts b/contrib/gn_module_occhab/frontend/app/components/delete-modal/delete-modal.component.ts
index acc18e73b9..d10a89c3a7 100644
--- a/contrib/gn_module_occhab/frontend/app/components/delete-modal/delete-modal.component.ts
+++ b/contrib/gn_module_occhab/frontend/app/components/delete-modal/delete-modal.component.ts
@@ -1,11 +1,11 @@
-import { Component, OnInit, Input, Output, EventEmitter } from "@angular/core";
-import { CommonService } from "@geonature_common/service/common.service";
-import { OccHabDataService } from "../../services/data.service";
-import { Router } from "@angular/router";
+import { Component, OnInit, Input, Output, EventEmitter } from '@angular/core';
+import { CommonService } from '@geonature_common/service/common.service';
+import { OccHabDataService } from '../../services/data.service';
+import { Router } from '@angular/router';

 @Component({
-  selector: "pnx-occhab-delete",
-  templateUrl: "./delete-modal.component.html"
+  selector: 
'pnx-occhab-delete', + templateUrl: './delete-modal.component.html', }) export class ModalDeleteStation implements OnInit { @Input() idStation: number; @@ -22,20 +22,11 @@ export class ModalDeleteStation implements OnInit { deleteStation() { this.onDelete.emit(); - this._occHabDataService.deleteOneStation(this.idStation).subscribe( - d => { - this._commonService.regularToaster( - "success", - "Station supprimée avec succès" - ); + this._occHabDataService.deleteStation(this.idStation).subscribe( + (d) => { + this._commonService.regularToaster('success', 'Station supprimée avec succès'); - this._router.navigate(["occhab"]); - }, - () => { - this._commonService.regularToaster( - "error", - "Erreur lors de la suppression de la station" - ); + this._router.navigate(['/occhab']); }, () => { this.c(); diff --git a/contrib/gn_module_occhab/frontend/app/components/occhab-info/occhab-info.component.html b/contrib/gn_module_occhab/frontend/app/components/occhab-info/occhab-info.component.html index ba2eac6c1f..e0c0a07b26 100644 --- a/contrib/gn_module_occhab/frontend/app/components/occhab-info/occhab-info.component.html +++ b/contrib/gn_module_occhab/frontend/app/components/occhab-info/occhab-info.component.html @@ -2,7 +2,7 @@
    @@ -11,25 +11,25 @@
    - Station n° {{stationContent?.id_station}} + Station n° {{station.properties.id_station}} @@ -45,29 +45,29 @@
    Jeu de donnée :
    -

    {{stationContent?.dataset.dataset_name}}

    +

    {{station?.properties.dataset?.dataset_name}}

    Date :

    - {{stationContent?.date_min | date:'dd/MM/yyyy'}} - - {{stationContent?.date_max | date:'dd/MM/yyyy'}} + {{station?.properties.date_min | date:'dd/MM/yyyy'}} - + {{station?.properties.date_max | date:'dd/MM/yyyy'}}

    Observateur(s) :
    -
    -

    +

    +

    {{obs.nom_role}} {{obs.prenom_role}} ,

    -

    {{stationContent?.observers_txt}}

    +

    {{station?.properties.observers_txt}}

    @@ -76,17 +76,17 @@
    Altitude (en m) :
    -

    {{stationContent?.altitude_min}} - {{stationContent?.altitude_max}}

    +

    {{station?.properties.altitude_min}} - {{station?.properties.altitude_max}}

    Surface (en m²) :
    -

    {{stationContent?.area}} ( - {{stationContent?.area_surface_calculation?.label_default}} )

    +

    {{station?.properties.area}} ( + {{station?.properties.nomenclature_area_surface_calculation?.label_default}} )

    Type d'information géographique :
    -

    - {{stationContent?.geographic_object?.label_default}}

    +

    + {{station.properties.nomenclature_geographic_object.label_default}}

    @@ -97,7 +97,7 @@
    Habitats de la station :
    {{hab.nom_cite}} @@ -113,7 +113,7 @@
    Habitats de la station :
    Commentaire :
    -

    {{stationContent?.comment}}

    +

    {{station?.properties.comment}}

    @@ -133,12 +133,12 @@
    {{currentHab?.nom_cite}}
    Technique de collecte : - {{currentHab?.collection_technique?.label_default}}
    + {{currentHab?.nomenclature_collection_technique?.label_default}}
    Méthode de détermination : - {{currentHab?.determination_method?.label_default}}
    + {{currentHab?.nomenclature_determination_type?.label_default}}
    Pourcentage de recouvrement : - {{currentHab?.recovery_percentage}}
    - Abondance : {{currentHab?.abundance?.label_default}}
    + {{currentHab?.nomenclature_recovery_percentage}}
    + Abondance : {{currentHab?.nomenclature_abundance?.label_default}}
    Information sur l'habitat (Habref) :
    @@ -243,8 +243,8 @@

    Correspondances
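The component rewrite just below drops the hand-rolled route-params subscription in favour of the route-level StationResolver added later in this diff. A sketch of the pattern under standard Angular Resolve semantics; every name prefixed Demo here is invented for the example:

import { Component, Injectable, OnInit } from '@angular/core';
import { ActivatedRoute, ActivatedRouteSnapshot, Resolve, Routes } from '@angular/router';
import { Observable, of } from 'rxjs';

// Invented stand-in for the real StationFeature model.
interface DemoStation {
  properties: { id_station: number };
}

@Injectable({ providedIn: 'root' })
export class DemoStationResolver implements Resolve<DemoStation> {
  resolve(route: ActivatedRouteSnapshot): Observable<DemoStation> {
    // The real resolver delegates to OccHabDataService.getStation()
    // and redirects with a toaster on 404.
    return of({ properties: { id_station: +route.paramMap.get('id_station') } });
  }
}

@Component({ selector: 'demo-info', template: '{{ station?.properties?.id_station }}' })
export class DemoInfoComponent implements OnInit {
  station: DemoStation;

  constructor(private route: ActivatedRoute) {}

  ngOnInit() {
    // The resolved value is published on ActivatedRoute.data under the key
    // chosen in the route declaration, so the component no longer fetches
    // the station itself.
    this.route.data.subscribe(({ station }) => (this.station = station));
  }
}

export const demoRoutes: Routes = [
  { path: 'info/:id_station', component: DemoInfoComponent, resolve: { station: DemoStationResolver } },
];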

    diff --git a/contrib/gn_module_occhab/frontend/app/components/occhab-info/occhab-info.component.ts b/contrib/gn_module_occhab/frontend/app/components/occhab-info/occhab-info.component.ts index 51b9ef093a..aa310670b4 100644 --- a/contrib/gn_module_occhab/frontend/app/components/occhab-info/occhab-info.component.ts +++ b/contrib/gn_module_occhab/frontend/app/components/occhab-info/occhab-info.component.ts @@ -1,24 +1,25 @@ -import { Component, OnInit, OnDestroy } from "@angular/core"; -import { OccHabDataService } from "../../services/data.service"; -import { Subscription } from "rxjs/Subscription"; -import { ActivatedRoute } from "@angular/router"; -import { DataFormService } from "@geonature_common/form/data-form.service"; -import { NgbModal } from "@ng-bootstrap/ng-bootstrap"; -import { CommonService } from "@geonature_common/service/common.service"; +import { Component, OnInit, OnDestroy } from '@angular/core'; +import { OccHabDataService } from '../../services/data.service'; +import { Subscription } from 'rxjs/Subscription'; +import { ActivatedRoute } from '@angular/router'; +import { DataFormService } from '@geonature_common/form/data-form.service'; +import { NgbModal } from '@ng-bootstrap/ng-bootstrap'; +import { CommonService } from '@geonature_common/service/common.service'; + +import { StationFeature } from '../../models'; @Component({ - selector: "pnx-occhab-info", - templateUrl: "./occhab-info.component.html", - styleUrls: ["./occhab-info.component.scss", "../responsive-map.scss"] + selector: 'pnx-occhab-info', + templateUrl: './occhab-info.component.html', + styleUrls: ['./occhab-info.component.scss', '../responsive-map.scss'], }) export class OcchabInfoComponent implements OnInit, OnDestroy { - public oneStation; - public stationContent; + public station: StationFeature; public currentHab; public habInfo: Array; public modalContent; public selectedIndex; - private _sub: Subscription; + constructor( private _occHabDataService: OccHabDataService, private _route: ActivatedRoute, @@ -28,38 +29,24 @@ export class OcchabInfoComponent implements OnInit, OnDestroy { private _commonService: CommonService ) {} - ngOnInit() {} - - ngAfterViewInit() { - // get the id from the route - this._sub = this._route.params.subscribe(params => { - if (params["id_station"]) { - this._occHabDataService - .getOneStation(params["id_station"]) - .subscribe(station => { - this.stationContent = station.properties; - this.oneStation = station; - }); - } + ngOnInit() { + this._route.data.subscribe(({ station }) => { + this.station = station; }); } setCurrentHab(index) { - this.currentHab = this.stationContent.t_one_habitats[index]; + this.currentHab = this.station.properties.habitats[index]; this.selectedIndex = index; } getHabInfo(cd_hab) { this._dataService.getHabitatInfo(cd_hab).subscribe( - data => { + (data) => { this.habInfo = data; }, () => { this.habInfo = null; - this._commonService.regularToaster( - "error", - "Erreur lors de l'interrogation Habref" - ); } ); } @@ -70,14 +57,12 @@ export class OcchabInfoComponent implements OnInit, OnDestroy { } openModal(modal) { - this.modal.open(modal, { size: "lg" }); + this.modal.open(modal, { size: 'lg' }); } openDeleteModal(modalDelete) { this._ngbModal.open(modalDelete); } - ngOnDestroy() { - this._sub.unsubscribe(); - } + ngOnDestroy() {} } diff --git a/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/dataset-map-overlay/dataset-map-overlay.component.ts 
b/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/dataset-map-overlay/dataset-map-overlay.component.ts index b5deba8c1e..deb59e9ec2 100644 --- a/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/dataset-map-overlay/dataset-map-overlay.component.ts +++ b/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/dataset-map-overlay/dataset-map-overlay.component.ts @@ -1,10 +1,10 @@ -import { Component, OnInit, Output, EventEmitter } from "@angular/core"; -import { MapService } from "@geonature_common/map/map.service"; -import * as L from "leaflet"; +import { Component, OnInit, Output, EventEmitter } from '@angular/core'; +import { MapService } from '@geonature_common/map/map.service'; +import * as L from 'leaflet'; @Component({ - selector: "pnx-occhab-dataset-map-overlay", - template: "" + selector: 'pnx-occhab-dataset-map-overlay', + template: '', }) export class OccHabDatasetMapOverlayComponent implements OnInit { @Output() getBoundingBox = new EventEmitter(); @@ -12,22 +12,22 @@ export class OccHabDatasetMapOverlayComponent implements OnInit { ngOnInit() { const CustomLegend = this._mapService.addCustomLegend( - "topright", - "occHabLayerControl", - "url(assets/images/location-pointer.png)" + 'topright', + 'occHabLayerControl', + 'url(assets/images/location-pointer.png)' ); this._mapService.map.addControl(new CustomLegend()); // L.DomEvent.disableClickPropagation( // document.getElementById("occHabLayerControl") // ); - document.getElementById("occHabLayerControl").onclick = () => { + document.getElementById('occHabLayerControl').onclick = () => { const bounds = this._mapService.map.getBounds(); this.getBoundingBox.emit({ southEast: bounds.getSouthEast(), southWest: bounds.getSouthWest(), northEast: bounds.getNorthEast(), - northWest: bounds.getNorthWest() + northWest: bounds.getNorthWest(), }); }; } diff --git a/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.html b/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.html index 1ed25974e0..88afdfc32c 100644 --- a/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.html +++ b/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.html @@ -1,30 +1,30 @@
    -
    +
    - - - - + + + + - - - + + +
    Habitat Abondance #HabitatAbondance#
    {{occHabForm.stationForm.value.t_habitats.length - i}} {{hab?.nom_cite}} {{hab?.id_nomenclature_abundance?.label_default}}{{ occHabForm.stationForm.value.habitats.length - i }}{{ hab?.nom_cite }}{{ hab?.id_nomenclature_abundance?.label_default }} - + - +
    - -
    -
    - - + - -
    @@ -97,25 +82,18 @@ id="overlay" [class.disabled-form]="disabledForm" (click)="formIsDisable()" - > -
    + >
    -
    +
    - + Station
    -
    +
    @@ -124,7 +102,7 @@
    @@ -134,11 +112,14 @@
    {{ 'MetaData.Datasets' | translate }}: - {{currentEditingStation?.properties?.dataset?.dataset_name}} + {{ currentEditingStation?.properties?.dataset?.dataset_name }}
    @@ -146,6 +127,7 @@
    label="{{ 'MetaData.Datasets' | translate }}" [parentFormControl]="occHabForm.stationForm.get('id_dataset')" moduleCode="OCCHAB" + creatableInModule="OCCHAB" data-qa="pnx-occhab-form-dataset" > @@ -157,23 +139,25 @@
    @@ -181,7 +165,7 @@
    {{ 'Releve.DateMaxError' | translate }} @@ -191,92 +175,82 @@
    Altitude min + />
    Altitude max + />
    {{ 'Releve.AltitudeError' | translate }}
    -
    +
    Profondeur min + />
    Profondeur max + />
    Surface (en m²) + />
    -
    -
    @@ -360,30 +336,19 @@
    Valider la station -
    -
    - +
    + Habitat
    -
    -
    +
    +
    Sélectionner une typologie d'habitat
    @@ -427,11 +380,15 @@
    (onChange)="occHabForm.patchNomCite($event)" > -
    +
    >
    - -
    Déterminateur + [formControl]=" + occHabForm.stationForm + .get('habitats') + .controls[occHabForm.currentEditingHabForm].get('determiner') + " + />
    - > -
    + [ngClass]="{ + 'ng-invalid': + occHabForm.stationForm.get('habitats').controls[ + occHabForm.currentEditingHabForm + ].errors?.invalidTechnicalValues + }" + [formControl]=" + occHabForm.stationForm + .get('habitats') + .controls[occHabForm.currentEditingHabForm].get('technical_precision') + " + />
    Veuillez fournir des précisions concernant la technique de collecte
    Pourcentage de recouvrement + [formControl]=" + occHabForm.stationForm + .get('habitats') + .controls[occHabForm.currentEditingHabForm].get('recovery_percentage') + " + />
    > - -
    +
    - -
    -
    -
    -
    - - -
    diff --git a/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.ts b/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.ts index deb84e75e7..c6b36350c9 100644 --- a/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.ts +++ b/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.ts @@ -1,32 +1,30 @@ -import { Component, OnInit } from "@angular/core"; -import { OcchabFormService } from "../../services/form-service"; -import { OcchabStoreService } from "../../services/store.service"; -import { DataFormService } from "@geonature_common/form/data-form.service"; -import { OccHabDataService } from "../../services/data.service"; -import { leafletDrawOption } from "@geonature_common/map/leaflet-draw.options"; -import { MapService } from "@geonature_common/map/map.service"; -import { ActivatedRoute, Router } from "@angular/router"; -import { Subscription } from "rxjs/Subscription"; -import { CommonService } from "@geonature_common/service/common.service"; -import { AppConfig } from "@geonature_config/app.config"; -import { ModuleConfig } from "../../module.config"; -import { filter } from "rxjs/operators"; +import { Component, OnInit, OnDestroy } from '@angular/core'; +import { OcchabFormService } from '../../services/form-service'; +import { OcchabStoreService } from '../../services/store.service'; +import { OccHabDataService } from '../../services/data.service'; +import { leafletDrawOption } from '@geonature_common/map/leaflet-draw.options'; +import { ActivatedRoute, Router } from '@angular/router'; +import { Subscription } from 'rxjs/Subscription'; +import { CommonService } from '@geonature_common/service/common.service'; +import { filter } from 'rxjs/operators'; +import { ConfigService } from '@geonature/services/config.service'; +import { StationFeature } from '../../models'; +import { FormService } from '@geonature_common/form/form.service'; + @Component({ - selector: "pnx-occhab-form", - templateUrl: "occhab-form.component.html", - styleUrls: ["./occhab-form.component.scss", "../responsive-map.scss"], - providers: [OcchabFormService] + selector: 'pnx-occhab-form', + templateUrl: 'occhab-form.component.html', + styleUrls: ['./occhab-form.component.scss', '../responsive-map.scss'], + providers: [OcchabFormService], }) -export class OccHabFormComponent implements OnInit { +export class OccHabFormComponent implements OnInit, OnDestroy { public leafletDrawOptions = leafletDrawOption; public filteredHab: any; - private _sub: Subscription; + private _sub: Array = []; public editionMode = false; - public MAP_SMALL_HEIGHT = "50vh !important;"; - public MAP_FULL_HEIGHT = "87vh"; + public MAP_SMALL_HEIGHT = '50vh !important;'; + public MAP_FULL_HEIGHT = '87vh'; public mapHeight = this.MAP_FULL_HEIGHT; - public appConfig = AppConfig; - public moduleConfig = ModuleConfig; public showHabForm = false; public showTabHab = false; public showDepth = false; @@ -34,10 +32,13 @@ export class OccHabFormComponent implements OnInit { public firstFileLayerMessage = true; public currentGeoJsonFileLayer; public markerCoordinates; - public currentEditingStation: any; + public currentEditingStation: StationFeature; // boolean tocheck if the station has at least one hab (control the validity of the form) public atLeastOneHab = false; + public isCollapseDepth = true; + public isCollaspeTypo = true; + constructor( public occHabForm: OcchabFormService, private _occHabDataService: 
OccHabDataService, @@ -45,9 +46,9 @@ export class OccHabFormComponent implements OnInit { private _route: ActivatedRoute, private _router: Router, private _commonService: CommonService, - private _gnDataService: DataFormService, - private _mapService: MapService - ) { } + public config: ConfigService, + public globalFormService: FormService + ) {} ngOnInit() { this.leafletDrawOptions; @@ -56,29 +57,27 @@ export class OccHabFormComponent implements OnInit { leafletDrawOption.draw.rectangle = false; this.occHabForm.stationForm = this.occHabForm.initStationForm(); - this.occHabForm.stationForm.controls.geom_4326.valueChanges.subscribe(d => { + + this.occHabForm.stationForm.controls.geom_4326.valueChanges.subscribe((d) => { this.disabledForm = false; }); - this.storeService.defaultNomenclature$ - .pipe(filter(val => val !== null)) - .subscribe(val => { - this.occHabForm.patchDefaultNomenclaureStation(val); - }); + this.storeService.defaultNomenclature$.pipe(filter((val) => val !== null)).subscribe((val) => { + this.occHabForm.patchDefaultNomenclaureStation(val); + }); } ngAfterViewInit() { // get the id from the route - this._sub = this._route.params.subscribe(params => { - if (params["id_station"]) { - this.editionMode = true; - this.atLeastOneHab = true; - this.showHabForm = false; - this.showTabHab = true; - this._occHabDataService - .getOneStation(params["id_station"]) - .subscribe(station => { + this._sub.push( + this._route.params.subscribe((params) => { + if (params['id_station']) { + this.editionMode = true; + this.atLeastOneHab = true; + this.showHabForm = false; + this.showTabHab = true; + this._occHabDataService.getStation(params['id_station']).subscribe((station: any) => { this.currentEditingStation = station; - if (station.geometry.type == "Point") { + if (station.geometry.type == 'Point') { // set the input for the marker component this.markerCoordinates = station.geometry.coordinates; } else { @@ -86,24 +85,26 @@ export class OccHabFormComponent implements OnInit { this.currentGeoJsonFileLayer = station.geometry; } this.occHabForm.patchStationForm(station); + if (station.properties.date_min != station.properties.date_max) { + this.occHabForm.stationForm.get('date_min').markAsDirty(); + this.occHabForm.stationForm.get('date_max').markAsDirty(); + } }); - } - }); + } + }) + ); } formIsDisable() { if (this.disabledForm) { - this._commonService.translateToaster( - "warning", - "Releve.FillGeometryFirst" - ); + this._commonService.translateToaster('warning', 'Releve.FillGeometryFirst'); } } // display help toaster for filelayer infoMessageFileLayer() { if (this.firstFileLayerMessage) { - this._commonService.translateToaster("info", "Map.FileLayerInfoMessage"); + this._commonService.translateToaster('info', 'Map.FileLayerInfoMessage'); } this.firstFileLayerMessage = false; } @@ -137,30 +138,19 @@ export class OccHabFormComponent implements OnInit { postStation() { const station = this.occHabForm.formatStationBeforePost(); - this._occHabDataService.postStation(station).subscribe( - data => { - this.occHabForm.resetAllForm(); - this._router.navigate(["occhab"]); - }, - error => { - if (error.status === 403) { - this._commonService.translateToaster("error", "NotAllowed"); - } else { - this._commonService.translateToaster("error", "ErrorMessage"); - } - } - ); + this._occHabDataService.createOrUpdateStation(station).subscribe((data) => { + this.occHabForm.resetAllForm(); + this._router.navigate(['occhab']); + }); } formatter(item) { return item.search_name; } - loadDatasetGeom(event) { - 
console.log(event);
-  }
-
   ngOnDestroy() {
-    this._sub.unsubscribe();
+    this._sub.forEach((sub) => {
+      sub.unsubscribe();
+    });
   }
 }
diff --git a/contrib/gn_module_occhab/frontend/app/components/occhab-map-list/modal-download.component.html b/contrib/gn_module_occhab/frontend/app/components/occhab-map-list/modal-download.component.html
index ee78261b3a..c1276dd4b3 100644
--- a/contrib/gn_module_occhab/frontend/app/components/occhab-map-list/modal-download.component.html
+++ b/contrib/gn_module_occhab/frontend/app/components/occhab-map-list/modal-download.component.html
@@ -29,7 +29,7 @@
     role="alert"
   >
     Vous pouvez néanmoins exporter les données, dans une limite de
-    {{MODULE_CONFIG.NB_MAX_EXPORT}}
+    {{config.OCCHAB.NB_MAX_EXPORT}}
     stations en cliquant sur les liens ci-dessous
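Stepping back to the ngOnDestroy rewrite at the top of this hunk: the form component now collects every Subscription into an array and tears them all down at once. The idiom, sketched independently of Angular with invented observables:

import { Subscription, interval } from 'rxjs';

// Sketch of the subscription-bookkeeping idiom adopted above: push each
// subscription into an array, unsubscribe from all of them in one pass.
class DemoComponent {
  private subs: Subscription[] = [];

  start() {
    this.subs.push(interval(1000).subscribe((i) => console.log('tick', i)));
    this.subs.push(interval(5000).subscribe((i) => console.log('tock', i)));
  }

  // In a real component Angular calls this on destroy.
  ngOnDestroy() {
    this.subs.forEach((sub) => sub.unsubscribe());
  }
}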
    @@ -38,7 +38,7 @@ role="alert" > En fermant cette fenêtre vous pouvez visualiser seulement les - {{MODULE_CONFIG.NB_MAX_MAP_LIST}} dernières + {{config.OCCHAB.NB_MAX_MAP_LIST}} dernières stations correspondant à votre requête.
    @@ -53,10 +53,10 @@
    Télécharger les habitats des stations

-    NB: Le limite du nombre d'habitat exportables est de {{MODULE_CONFIG.NB_MAX_EXPORT}}
+    NB : la limite du nombre d'habitats exportables est de {{config.OCCHAB.NB_MAX_EXPORT}}
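These NB_MAX_* reads are instances of the swap applied across this diff: the build-time ModuleConfig/AppConfig constants give way to an injected ConfigService. A hedged sketch of the consuming side; the Demo names and the values are invented, only the config keys come from the diff:

// Invented stand-in mirroring the shape the templates above rely on;
// the real service is @geonature/services/config.service.
class DemoConfigService {
  API_ENDPOINT = 'https://example.org/api';
  OCCHAB = { NB_MAX_EXPORT: 50000, NB_MAX_MAP_LIST: 50 };
}

class DemoMapList {
  // Injected publicly so templates can read {{ config.OCCHAB.NB_MAX_EXPORT }}.
  constructor(public config: DemoConfigService) {}

  // Same guard the map-list component applies before opening the download modal.
  tooManyResults(featureCount: number): boolean {
    return featureCount === this.config.OCCHAB.NB_MAX_MAP_LIST;
  }
}

const list = new DemoMapList(new DemoConfigService());
console.log(list.tooManyResults(50)); // true: the modal then advertises the export cap

Because the service is injected at runtime, the apparent motivation is that modules no longer need a generated module.config.ts at build time.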

    -
    - +
    -
    +
    - - + - - + - - - + + + - {{row.t_habitats?.length}} - {{displayHabTooltip(row)}} + {{ row.habitats?.length }} + {{ displayHabTooltip(row) }} + - - - - - - {{ row.date_min|date:'dd-MM-yyyy' }} - - - - - - - - - {{row.dataset.dataset_name}} - - + + + + {{ row.date_min | date : 'dd-MM-yyyy' }} + + + - + + + + {{ row.dataset.dataset_name }} + + +
    -
    - - + - diff --git a/contrib/gn_module_occhab/frontend/app/components/occhab-map-list/occhab-map-list.component.ts b/contrib/gn_module_occhab/frontend/app/components/occhab-map-list/occhab-map-list.component.ts index dfbc887cb2..093c809efe 100644 --- a/contrib/gn_module_occhab/frontend/app/components/occhab-map-list/occhab-map-list.component.ts +++ b/contrib/gn_module_occhab/frontend/app/components/occhab-map-list/occhab-map-list.component.ts @@ -1,38 +1,42 @@ -import { Component, OnInit, ViewChild, HostListener } from "@angular/core"; -import { OcchabStoreService } from "../../services/store.service"; -import { MapListService } from "@geonature_common/map-list/map-list.service"; -import { OccHabDataService } from "../../services/data.service"; -import { DatatableComponent } from "@swimlane/ngx-datatable"; -import { OccHabModalDownloadComponent } from "./modal-download.component"; -import { NgbModal, NgbActiveModal } from "@ng-bootstrap/ng-bootstrap"; -import { CommonService } from "@geonature_common/service/common.service"; -import * as moment from "moment"; -import { ModuleConfig } from "../../module.config"; +import { Component, OnInit, ViewChild, HostListener } from '@angular/core'; +import { OcchabStoreService } from '../../services/store.service'; +import { MapListService } from '@geonature_common/map-list/map-list.service'; +import { OccHabDataService } from '../../services/data.service'; +import { DatatableComponent } from '@swimlane/ngx-datatable'; +import { OccHabModalDownloadComponent } from './modal-download.component'; +import { NgbModal, NgbActiveModal } from '@ng-bootstrap/ng-bootstrap'; +import { CommonService } from '@geonature_common/service/common.service'; +import * as moment from 'moment'; +import { ConfigService } from '@geonature/services/config.service'; +import { OccHabMapListService } from '../../services/occhab-map-list.service'; @Component({ - selector: "pnx-occhab-map-list", - templateUrl: "occhab-map-list.component.html", - styleUrls: ["./occhab-map-list.component.scss", "../responsive-map.scss"], - providers: [NgbActiveModal] + selector: 'pnx-occhab-map-list', + templateUrl: 'occhab-map-list.component.html', + styleUrls: ['./occhab-map-list.component.scss', '../responsive-map.scss'], + providers: [NgbActiveModal], }) export class OccHabMapListComponent implements OnInit { - @ViewChild("dataTable") dataTable: DatatableComponent; + @ViewChild('dataTable') dataTable: DatatableComponent; public rowNumber: number; public dataLoading = true; public deleteOne: any; + + public isCollapseFilter = true; + constructor( public storeService: OcchabStoreService, private _occHabDataService: OccHabDataService, public mapListService: MapListService, private _ngbModal: NgbModal, - private _commonService: CommonService + private _commonService: CommonService, + public config: ConfigService, + public mapListFormService: OccHabMapListService ) {} + ngOnInit() { if (this.storeService.firstMessageMapList) { - this._commonService.regularToaster( - "info", - "Les 50 dernières stations saisies" - ); + this._commonService.regularToaster('info', 'Les 50 dernières stations saisies'); this.storeService.firstMessageMapList = false; } @@ -42,7 +46,7 @@ export class OccHabMapListComponent implements OnInit { this.rowNumber = this.calculeteRowNumber(h); // observable on mapListService.currentIndexRow to find the current page - this.mapListService.currentIndexRow$.subscribe(indexRow => { + this.mapListService.currentIndexRow$.subscribe((indexRow) => { const currentPage = 
Math.trunc(indexRow / this.rowNumber);
       this.dataTable.offset = currentPage;
     });
@@ -61,27 +65,24 @@
   }
 
   // update the number of rows per page when the window is resized
-  @HostListener("window:resize", ["$event"])
+  @HostListener('window:resize', ['$event'])
   onResize(event) {
     this.rowNumber = this.calculeteRowNumber(event.target.innerHeight);
   }
 
   getStations(params?) {
+    params['habitats'] = 1;
     this.dataLoading = true;
-    this._occHabDataService.getStations(params).subscribe(
-      featuresCollection => {
+    this._occHabDataService.listStations(params).subscribe(
+      (featuresCollection) => {
         // store the idsStation in the store service
-        if (
-          featuresCollection.features.length === ModuleConfig.NB_MAX_MAP_LIST
-        ) {
+        if (featuresCollection.features.length === this.config.OCCHAB.NB_MAX_MAP_LIST) {
           this.openModal(true);
         }
-        this.storeService.idsStation = featuresCollection.features.map(
-          feature => feature.id
-        );
+        this.storeService.idsStation = featuresCollection.features.map((feature) => feature.id);
         // this.stations = data;
         this.mapListService.tableData = [];
-        featuresCollection.features.forEach(feature => {
+        featuresCollection.features.forEach((feature) => {
           // add leaflet popup
           this.displayLeafletPopupCallback(feature);
           // push the data in the dataTable array
@@ -91,9 +92,9 @@
         this.dataLoading = false;
       },
       // error callback
-      e => {
+      (e) => {
         if (e.status == 500) {
-          this._commonService.translateToaster("error", "ErrorMessage");
+          this._commonService.translateToaster('error', 'ErrorMessage');
         }
         this.dataLoading = false;
       }
@@ -106,7 +107,7 @@
   openModal(tooManyObs = false) {
     const ref = this._ngbModal.open(OccHabModalDownloadComponent, {
-      size: "lg"
+      size: 'lg',
     });
     ref.componentInstance.tooManyObs = tooManyObs;
   }
@@ -117,11 +118,11 @@
   displayHabTooltip(row): string[] {
     let tooltip = [];
-    if (row.t_habitats === undefined) {
-      tooltip.push("Aucun habitat");
+    if (row.habitats === undefined) {
+      tooltip.push('Aucun habitat');
     } else {
-      for (let i = 0; i < row.t_habitats.length; i++) {
-        let occ = row.t_habitats[i];
+      for (let i = 0; i < row.habitats.length; i++) {
+        let occ = row.habitats[i];
         tooltip.push(occ.nom_cite);
       }
     }
@@ -131,15 +132,15 @@
   displayObservateursTooltip(row): string[] {
     let tooltip = [];
     if (row.observers === undefined) {
-      if (row.observers_txt !== null && row.observers_txt.trim() !== "") {
+      if (row.observers_txt !== null && row.observers_txt.trim() !== '') {
         tooltip.push(row.observers_txt.trim());
       } else {
-        tooltip.push("Aucun observateurs");
+        tooltip.push('Aucun observateur');
       }
     } else {
       for (let i = 0; i < row.observers.length; i++) {
         let obs = row.observers[i];
-        tooltip.push([obs.prenom_role, obs.nom_role].join(" "));
+        tooltip.push([obs.prenom_role, obs.nom_role].join(' '));
       }
     }
     return tooltip.sort();
@@ -147,40 +148,38 @@
   displayDateTooltip(element): string {
     return element.date_min == element.date_max
-      ? moment(element.date_min).format("DD-MM-YYYY")
-      : `Du ${moment(element.date_min).format("DD-MM-YYYY")} au ${moment(
-          element.date_max
-        ).format("DD-MM-YYYY")}`;
+      ? moment(element.date_min).format('DD-MM-YYYY')
+      : `Du ${moment(element.date_min).format('DD-MM-YYYY')} au ${moment(element.date_max).format(
+          'DD-MM-YYYY'
+        )}`;
   }
 
   displayLeafletPopupCallback(feature): any {
-    const leafletPopup: HTMLElement = document.createElement("div");
-    leafletPopup.style.maxHeight = "80vh";
-    leafletPopup.style.overflowY = "auto";
+    const leafletPopup: HTMLElement = document.createElement('div');
+    leafletPopup.style.maxHeight = '80vh';
+    leafletPopup.style.overflowY = 'auto';
 
-    const divObservateurs = document.createElement("div");
-    divObservateurs.innerHTML = " Observateurs : <br>";
+    const divObservateurs = document.createElement('div');
+    divObservateurs.innerHTML = ' Observateurs : <br>';
     divObservateurs.innerHTML =
-      divObservateurs.innerHTML +
-      this.displayObservateursTooltip(feature.properties).join(", ");
+      divObservateurs.innerHTML + this.displayObservateursTooltip(feature.properties).join(', ');
 
-    const divDate = document.createElement("div");
-    divDate.innerHTML = " Date : <br>";
-    divDate.innerHTML =
-      divDate.innerHTML + this.displayDateTooltip(feature.properties);
+    const divDate = document.createElement('div');
+    divDate.innerHTML = ' Date : <br>';
+    divDate.innerHTML = divDate.innerHTML + this.displayDateTooltip(feature.properties);
 
-    const divHab = document.createElement("div");
-    divHab.innerHTML = " Habitats : <br>";
+    const divHab = document.createElement('div');
+    divHab.innerHTML = ' Habitats : <br>';
 
-    divHab.style.marginTop = "5px";
-    let taxons = this.displayHabTooltip(feature.properties).join("<br>");
+    divHab.style.marginTop = '5px';
+    let taxons = this.displayHabTooltip(feature.properties).join('<br>
    '); divHab.innerHTML = divHab.innerHTML + taxons; leafletPopup.appendChild(divObservateurs); leafletPopup.appendChild(divDate); leafletPopup.appendChild(divHab); - feature.properties["leaflet_popup"] = leafletPopup; + feature.properties['leaflet_popup'] = leafletPopup; return feature; } diff --git a/contrib/gn_module_occhab/frontend/app/gnModule.module.ts b/contrib/gn_module_occhab/frontend/app/gnModule.module.ts index 6a3eaf1dc1..f1a374fd4c 100644 --- a/contrib/gn_module_occhab/frontend/app/gnModule.module.ts +++ b/contrib/gn_module_occhab/frontend/app/gnModule.module.ts @@ -1,25 +1,35 @@ -import { NgModule } from "@angular/core"; -import { NgbModule } from "@ng-bootstrap/ng-bootstrap"; +import { NgModule } from '@angular/core'; +import { NgbModule } from '@ng-bootstrap/ng-bootstrap'; + +import { CommonModule } from '@angular/common'; +import { GN2CommonModule } from '@geonature_common/GN2Common.module'; +import { Routes, RouterModule } from '@angular/router'; +import { OccHabFormComponent } from './components/occhab-map-form/occhab-form.component'; +import { OccHabMapListComponent } from './components/occhab-map-list/occhab-map-list.component'; +import { OcchabMapListFilterComponent } from './components/occhab-map-list/occhab-map-list-filter.component'; +import { OccHabDataService } from './services/data.service'; +import { OcchabStoreService } from './services/store.service'; +import { OccHabMapListService } from './services/occhab-map-list.service'; +import { OccHabModalDownloadComponent } from './components/occhab-map-list/modal-download.component'; +import { OcchabInfoComponent } from './components/occhab-info/occhab-info.component'; +import { ModalDeleteStation } from './components/delete-modal/delete-modal.component'; +import { OccHabDatasetMapOverlayComponent } from './components/occhab-map-form/dataset-map-overlay/dataset-map-overlay.component'; +import { StationResolver } from './resolvers/station.resolver'; -import { CommonModule } from "@angular/common"; -import { GN2CommonModule } from "@geonature_common/GN2Common.module"; -import { Routes, RouterModule } from "@angular/router"; -import { OccHabFormComponent } from "./components/occhab-map-form/occhab-form.component"; -import { OccHabMapListComponent } from "./components/occhab-map-list/occhab-map-list.component"; -import { OcchabMapListFilterComponent } from "./components/occhab-map-list/occhab-map-list-filter.component"; -import { OccHabDataService } from "./services/data.service"; -import { OcchabStoreService } from "./services/store.service"; -import { OccHabMapListService } from "./services/occhab-map-list.service"; -import { OccHabModalDownloadComponent } from "./components/occhab-map-list/modal-download.component"; -import { OcchabInfoComponent } from "./components/occhab-info/occhab-info.component"; -import { ModalDeleteStation } from "./components/delete-modal/delete-modal.component"; -import { OccHabDatasetMapOverlayComponent } from "./components/occhab-map-form/dataset-map-overlay/dataset-map-overlay.component"; // my module routing const routes: Routes = [ - { path: "form", component: OccHabFormComponent }, - { path: "form/:id_station", component: OccHabFormComponent }, - { path: "", component: OccHabMapListComponent }, - { path: "info/:id_station", component: OcchabInfoComponent } + { path: '', component: OccHabMapListComponent }, + { path: 'add', component: OccHabFormComponent }, + { + path: 'edit/:id_station', + component: OccHabFormComponent, + resolve: { station: StationResolver }, + }, + { + path: 
'info/:id_station', + component: OcchabInfoComponent, + resolve: { station: StationResolver }, + }, ]; @NgModule({ @@ -30,17 +40,12 @@ const routes: Routes = [ OcchabInfoComponent, OccHabModalDownloadComponent, ModalDeleteStation, - OccHabDatasetMapOverlayComponent - ], - imports: [ - CommonModule, - GN2CommonModule, - RouterModule.forChild(routes), - NgbModule + OccHabDatasetMapOverlayComponent, ], + imports: [CommonModule, GN2CommonModule, RouterModule.forChild(routes), NgbModule], entryComponents: [OccHabModalDownloadComponent], - providers: [OccHabDataService, OcchabStoreService, OccHabMapListService], - bootstrap: [] + providers: [OccHabDataService, OcchabStoreService, OccHabMapListService, StationResolver], + bootstrap: [], }) export class GeonatureModule {} diff --git a/contrib/gn_module_occhab/frontend/app/models.ts b/contrib/gn_module_occhab/frontend/app/models.ts new file mode 100644 index 0000000000..5b29d49971 --- /dev/null +++ b/contrib/gn_module_occhab/frontend/app/models.ts @@ -0,0 +1,31 @@ +export interface CRUVED { + C: boolean; + R: boolean; + U: boolean; + V: boolean; + E: boolean; + D: boolean; +} + +export interface OccurenceHabitat {} + +export interface Station { + id_station?: number; + habitats: Array; + cruved: CRUVED; +} + +export interface StationFeature { + id?: number; + type: 'Feature'; + geometry: { + type: string; + coordinates: [number, number]; + }; + properties: Station; +} + +export interface StationFeatureCollection { + type: 'FeatureCollection'; + features: Array; +} diff --git a/contrib/gn_module_occhab/frontend/app/resolvers/station.resolver.ts b/contrib/gn_module_occhab/frontend/app/resolvers/station.resolver.ts new file mode 100644 index 0000000000..c02ae84c81 --- /dev/null +++ b/contrib/gn_module_occhab/frontend/app/resolvers/station.resolver.ts @@ -0,0 +1,27 @@ +import { Injectable } from '@angular/core'; +import { Resolve, ActivatedRouteSnapshot, RouterStateSnapshot, Router } from '@angular/router'; +import { Observable, of } from 'rxjs'; + +import { CommonService } from '@geonature_common/service/common.service'; + +import { StationFeature } from '../models'; +import { OccHabDataService } from '../services/data.service'; + +@Injectable({ providedIn: 'root' }) +export class StationResolver implements Resolve { + constructor( + private service: OccHabDataService, + private commonService: CommonService, + private router: Router + ) {} + + resolve(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): Observable { + return this.service.getStation(+route.paramMap.get('id_station')).catch((error) => { + if (error.status == 404) { + this.commonService.translateToaster('warning', 'Station introuvable'); + } + this.router.navigate(['/occhab']); + return of(null); + }); + } +} diff --git a/contrib/gn_module_occhab/frontend/app/services/data.service.ts b/contrib/gn_module_occhab/frontend/app/services/data.service.ts index c9cf072e5d..d9f6e8493d 100644 --- a/contrib/gn_module_occhab/frontend/app/services/data.service.ts +++ b/contrib/gn_module_occhab/frontend/app/services/data.service.ts @@ -1,62 +1,73 @@ -import { Injectable } from "@angular/core"; -import { HttpClient, HttpParams, HttpHeaders } from "@angular/common/http"; -import { - DataFormService, - FormatMapMime -} from "@geonature_common/form/data-form.service"; +import { Injectable } from '@angular/core'; +import { HttpClient, HttpParams } from '@angular/common/http'; +import { DataFormService } from '@geonature_common/form/data-form.service'; +import { ConfigService } from 
'@geonature/services/config.service'; +import { Observable } from 'rxjs'; -import { AppConfig } from "@geonature_config/app.config"; -import { ModuleConfig } from "../module.config"; +import { StationFeature, StationFeatureCollection } from '../models'; @Injectable() export class OccHabDataService { + private OCCHAB_API; + constructor( private _http: HttpClient, - private _gnDataService: DataFormService - ) {} + private _gnDataService: DataFormService, + public config: ConfigService + ) { + this.OCCHAB_API = `${this.config.API_ENDPOINT}/occhab`; + } - postStation(data) { - return this._http.post( - `${AppConfig.API_ENDPOINT}/occhab/station`, - data - ); + listStations(params = {}): Observable { + params['format'] = 'geojson'; + return this._http.get(`${this.OCCHAB_API}/stations/`, { + params: params, + }); } - getStations(params?) { - let queryString: HttpParams = new HttpParams(); - for (let key in params) { - if (params[key]) { - queryString = queryString.set(key, params[key]); - } - } - return this._http.get( - `${AppConfig.API_ENDPOINT}/occhab/stations`, - { params: queryString } - ); + createStation(station: StationFeature, params = {}): Observable { + params['format'] = 'geojson'; + return this._http.post(`${this.OCCHAB_API}/stations/`, station, { + params: params, + }); } - getOneStation(idStation) { - return this._http.get( - `${AppConfig.API_ENDPOINT}/occhab/station/${idStation}` - ); + getStation(stationId: number, params = {}): Observable { + params['format'] = 'geojson'; + return this._http.get(`${this.OCCHAB_API}/stations/${stationId}/`); } - deleteOneStation(idStation) { - return this._http.delete( - `${AppConfig.API_ENDPOINT}/occhab/station/${idStation}` - ); + updateStation(station: StationFeature, params = {}): Observable { + params['format'] = 'geojson'; + let stationId = station.properties.id_station; + return this._http.post(`${this.OCCHAB_API}/stations/${stationId}/`, station, { + params: params, + }); + } + + createOrUpdateStation(station: StationFeature, params = {}): Observable { + let stationId = station.properties.id_station; + if (stationId) { + return this.updateStation(station, params); + } else { + return this.createStation(station, params); + } + } + + deleteStation(stationId, params = {}): Observable { + return this._http.delete(`${this.OCCHAB_API}/stations/${stationId}/`, { params: params }); } exportStations(export_format, idsStation?: Array) { const sub = this._http.post( - `${AppConfig.API_ENDPOINT}/occhab/export_stations/${export_format}`, + `${this.OCCHAB_API}/export_stations/${export_format}`, { idsStation: idsStation }, { - observe: "events", - responseType: "blob", - reportProgress: true + observe: 'events', + responseType: 'blob', + reportProgress: true, } ); - this._gnDataService.subscribeAndDownload(sub, "export_hab", export_format); + this._gnDataService.subscribeAndDownload(sub, 'export_hab', export_format); } } diff --git a/contrib/gn_module_occhab/frontend/app/services/form-service.ts b/contrib/gn_module_occhab/frontend/app/services/form-service.ts index 427b8a93e0..efa2b81ffc 100644 --- a/contrib/gn_module_occhab/frontend/app/services/form-service.ts +++ b/contrib/gn_module_occhab/frontend/app/services/form-service.ts @@ -1,52 +1,48 @@ -import { Injectable } from "@angular/core"; +import { Injectable } from '@angular/core'; import { - FormBuilder, - FormGroup, - FormControl, + UntypedFormBuilder, + UntypedFormGroup, + UntypedFormControl, Validators, AbstractControl, - FormArray -} from "@angular/forms"; -import { NgbDateParserFormatter } 
from "@ng-bootstrap/ng-bootstrap"; -import { FormService } from "@geonature_common/form/form.service"; -import { DataFormService } from "@geonature_common/form/data-form.service"; -import { OcchabStoreService } from "./store.service"; -import { ModuleConfig } from "../module.config"; + UntypedFormArray, +} from '@angular/forms'; +import { NgbDateParserFormatter } from '@ng-bootstrap/ng-bootstrap'; +import { FormService } from '@geonature_common/form/form.service'; +import { DataFormService } from '@geonature_common/form/data-form.service'; +import { OcchabStoreService } from './store.service'; +import { ConfigService } from '@geonature/services/config.service'; +import { Station, StationFeature } from '../models'; @Injectable() export class OcchabFormService { - public stationForm: FormGroup; - public typoHabControl = new FormControl(); + public stationForm: UntypedFormGroup; + public typoHabControl = new UntypedFormControl(); public selectedTypo: any; public currentEditingHabForm = null; constructor( - private _fb: FormBuilder, + private _fb: UntypedFormBuilder, private _dateParser: NgbDateParserFormatter, private _gn_dataSerice: DataFormService, private _storeService: OcchabStoreService, - private _formService: FormService + private _formService: FormService, + public config: ConfigService ) { // get selected cd_typo to filter the habref autcomplete - this.typoHabControl.valueChanges.subscribe(data => { + this.typoHabControl.valueChanges.subscribe((data) => { this.selectedTypo = { cd_typo: data }; }); } - initStationForm(): FormGroup { + initStationForm(): UntypedFormGroup { const stationForm = this._fb.group({ id_station: null, unique_id_sinp_station: null, id_dataset: [null, Validators.required], date_min: [null, Validators.required], date_max: [null, Validators.required], - observers: [ - null, - !ModuleConfig.OBSERVER_AS_TXT ? Validators.required : null - ], - observers_txt: [ - null, - ModuleConfig.OBSERVER_AS_TXT ? Validators.required : null - ], + observers: [null, !this.config.OCCHAB.OBSERVER_AS_TXT ? Validators.required : null], + observers_txt: [null, this.config.OCCHAB.OBSERVER_AS_TXT ? 
Validators.required : null], is_habitat_complex: false, id_nomenclature_exposure: null, altitude_min: null, @@ -58,66 +54,55 @@ export class OcchabFormService { id_nomenclature_geographic_object: [null, Validators.required], geom_4326: [null, Validators.required], comment: null, - t_habitats: this._fb.array([]) + habitats: this._fb.array([]), }); stationForm.setValidators([ - this._formService.dateValidator( - stationForm.get("date_min"), - stationForm.get("date_max") - ), + this._formService.dateValidator(stationForm.get('date_min'), stationForm.get('date_max')), this._formService.minMaxValidator( - stationForm.get("altitude_min"), - stationForm.get("altitude_max"), - "invalidAlt" - ) + stationForm.get('altitude_min'), + stationForm.get('altitude_max'), + 'invalidAlt' + ), ]); - this._formService.autoCompleteDate(stationForm); - return stationForm; } patchDefaultNomenclaureStation(defaultNomenclature) { this.stationForm.patchValue({ - id_nomenclature_area_surface_calculation: - defaultNomenclature["METHOD_CALCUL_SURFACE"], - id_nomenclature_geographic_object: defaultNomenclature["NAT_OBJ_GEO"] + id_nomenclature_area_surface_calculation: defaultNomenclature['METHOD_CALCUL_SURFACE'], + id_nomenclature_geographic_object: defaultNomenclature['NAT_OBJ_GEO'], }); } - initHabForm(defaultNomenclature): FormGroup { + initHabForm(defaultNomenclature): UntypedFormGroup { const habForm = this._fb.group({ - id_station: null, id_habitat: null, unique_id_sinp_hab: null, nom_cite: null, habref: [Validators.required, this.cdHabValidator], - id_nomenclature_determination_type: defaultNomenclature ? - defaultNomenclature["DETERMINATION_TYP_HAB"] : null, + id_nomenclature_determination_type: defaultNomenclature + ? defaultNomenclature['DETERMINATION_TYP_HAB'] + : null, determiner: null, id_nomenclature_community_interest: null, id_nomenclature_collection_technique: [ - defaultNomenclature ? defaultNomenclature["TECHNIQUE_COLLECT_HAB"] : null, - Validators.required + defaultNomenclature ? 
defaultNomenclature['TECHNIQUE_COLLECT_HAB'] : null, + Validators.required, ], recovery_percentage: null, id_nomenclature_abundance: null, - technical_precision: null + technical_precision: null, }); habForm.setValidators([this.technicalValidator]); return habForm; } technicalValidator(habForm: AbstractControl): { [key: string]: boolean } { - const technicalValue = habForm.get("id_nomenclature_collection_technique") - .value; - const technicalPrecision = habForm.get("technical_precision").value; + const technicalValue = habForm.get('id_nomenclature_collection_technique').value; + const technicalPrecision = habForm.get('technical_precision').value; - if ( - technicalValue && - technicalValue.cd_nomenclature == "10" && - !technicalPrecision - ) { + if (technicalValue && technicalValue.cd_nomenclature == '10' && !technicalPrecision) { return { invalidTechnicalValues: true }; } return null; @@ -129,7 +114,7 @@ export class OcchabFormService { return null; } else if (!currentHab.cd_hab && !currentHab.search_name) { return { - invalidTaxon: true + invalidTaxon: true, }; } else { return null; @@ -141,12 +126,9 @@ export class OcchabFormService { } addNewHab() { - const currentHabNumber = this.stationForm.value.t_habitats.length - 1; - const habFormArray = this.stationForm.controls.t_habitats as FormArray; - habFormArray.insert( - 0, - this.initHabForm(this._storeService.defaultNomenclature) - ); + const currentHabNumber = this.stationForm.value.habitats.length - 1; + const habFormArray = this.stationForm.controls.habitats as UntypedFormArray; + habFormArray.insert(0, this.initHabForm(this._storeService.defaultNomenclature)); this.currentEditingHabForm = 0; } @@ -172,14 +154,14 @@ export class OcchabFormService { * @param index index of the habitat to delete */ deleteHab(index) { - const habArrayForm = this.stationForm.controls.t_habitats as FormArray; + const habArrayForm = this.stationForm.controls.habitats as UntypedFormArray; habArrayForm.removeAt(index); } patchGeoValue(geom) { this.stationForm.patchValue({ geom_4326: geom.geometry }); this._gn_dataSerice.getAreaSize(geom).subscribe( - data => { + (data) => { this.stationForm.patchValue({ area: Math.round(data) }); }, // if error reset area @@ -192,25 +174,25 @@ export class OcchabFormService { // }); this._gn_dataSerice.getGeoInfo(geom).subscribe( - data => { + (data) => { this.stationForm.patchValue({ - altitude_min: data["altitude"]["altitude_min"], - altitude_max: data["altitude"]["altitude_max"] + altitude_min: data['altitude']['altitude_min'], + altitude_max: data['altitude']['altitude_max'], }); }, () => { this.stationForm.patchValue({ altitude_min: null, - altitude_max: null + altitude_max: null, }); } ); } patchNomCite($event) { - const habArrayForm = this.stationForm.controls.t_habitats as FormArray; + const habArrayForm = this.stationForm.controls.habitats as UntypedFormArray; habArrayForm.controls[this.currentEditingHabForm].patchValue({ - nom_cite: $event.item.search_name + nom_cite: $event.item.search_name, }); } @@ -219,8 +201,8 @@ export class OcchabFormService { * @param obj a dict with id_nomenclature key */ formatNomenclature(obj) { - Object.keys(obj).forEach(key => { - if (key.startsWith("id_nomenclature") && obj[key]) { + Object.keys(obj).forEach((key) => { + if (key.startsWith('id_nomenclature') && obj[key]) { obj[key] = obj[key].id_nomenclature; } }); @@ -234,68 +216,64 @@ export class OcchabFormService { * format the data returned by get one station to fit with the form */ formatStationAndHabtoPatch(station) { - // me 
- const formatedHabitats = station.t_one_habitats.map(hab => { + // me + const formatedHabitats = station.habitats.map((hab) => { // hab.habref["search_name"] = hab.nom_cite; return { ...hab, id_nomenclature_determination_type: this.getOrNull( hab, - "determination_method" + 'nomenclature_determination_method' ), id_nomenclature_collection_technique: this.getOrNull( hab, - "collection_technique" + 'nomenclature_collection_technique' ), - id_nomenclature_abundance: this.getOrNull(hab, "abundance") + id_nomenclature_abundance: this.getOrNull(hab, 'nomenclature_abundance'), }; }); - station.t_habitats.forEach((hab, index) => { - formatedHabitats[index]["habref"] = hab.habref || {}; - formatedHabitats[index]["habref"]["search_name"] = hab.nom_cite; - - }); - station["t_habitats"] = formatedHabitats; + station.habitats.forEach((hab, index) => { + formatedHabitats[index]['habref'] = hab.habref || {}; + formatedHabitats[index]['habref']['search_name'] = hab.nom_cite; + }); + station['habitats'] = formatedHabitats; return { ...station, date_min: this._dateParser.parse(station.date_min), date_max: this._dateParser.parse(station.date_max), - id_nomenclature_geographic_object: this.getOrNull( - station, - "geographic_object" - ), + id_nomenclature_geographic_object: this.getOrNull(station, 'nomenclature_geographic_object'), id_nomenclature_area_surface_calculation: this.getOrNull( station, - "area_surface_calculation" + 'nomenclature_area_surface_calculation' ), - id_nomenclature_exposure: this.getOrNull(station, "exposure") + id_nomenclature_exposure: this.getOrNull(station, 'nomenclature_exposure'), }; } patchStationForm(oneStation) { - // create t_habitat formArray - for (let i = 0; i < oneStation.properties.t_one_habitats.length; i++) { - (this.stationForm.controls.t_habitats as FormArray).push( + // create habitat formArray + for (let i = 0; i < oneStation.properties.habitats.length; i++) { + (this.stationForm.controls.habitats as UntypedFormArray).push( this.initHabForm(this._storeService.defaultNomenclature) ); } - + const formatedData = this.formatStationAndHabtoPatch(oneStation.properties); this.stationForm.patchValue(formatedData); this.stationForm.patchValue({ - geom_4326: oneStation.geometry + geom_4326: oneStation.geometry, }); this.currentEditingHabForm = null; } /** Format a station before post */ - formatStationBeforePost() { + formatStationBeforePost(): StationFeature { let formData = Object.assign({}, this.stationForm.value); //format cd_hab - formData.t_habitats.forEach(element => { + formData.habitats.forEach((element) => { if (element.habref) { element.cd_hab = element.habref.cd_hab; - delete element["habref"]; + delete element['habref']; } }); @@ -307,21 +285,21 @@ export class OcchabFormService { // format habitat nomenclatures - formData.t_habitats.forEach(element => { + formData.habitats.forEach((element) => { this.formatNomenclature(element); }); // Format data in geojson - const geom = formData["geom_4326"]; - delete formData["geom_4326"]; + const geom = formData['geom_4326']; + delete formData['geom_4326']; return { - type: "Feature", + type: 'Feature', geometry: { - ...geom + ...geom, }, properties: { - ...formData - } + ...formData, + }, }; } } diff --git a/contrib/gn_module_occhab/frontend/app/services/occhab-map-list.service.ts b/contrib/gn_module_occhab/frontend/app/services/occhab-map-list.service.ts index 00deecf4e9..a7cb90454d 100644 --- a/contrib/gn_module_occhab/frontend/app/services/occhab-map-list.service.ts +++ 
b/contrib/gn_module_occhab/frontend/app/services/occhab-map-list.service.ts @@ -1,22 +1,28 @@ -import { Injectable } from "@angular/core"; -import { FormBuilder, FormGroup } from "@angular/forms"; -import { MapListService } from "@geonature_common/map-list/map-list.service"; -import { OccHabDataService } from "../services/data.service"; -import * as moment from "moment"; +import { Injectable } from '@angular/core'; +import { UntypedFormBuilder, UntypedFormGroup } from '@angular/forms'; +import { MapListService } from '@geonature_common/map-list/map-list.service'; +import { OccHabDataService } from '../services/data.service'; +import * as moment from 'moment'; @Injectable() export class OccHabMapListService { - public searchForm: FormGroup; + public searchForm: UntypedFormGroup; public mapListService: MapListService; - constructor( - private _fb: FormBuilder, - private _occHabDataService: OccHabDataService - ) { + constructor(private _fb: UntypedFormBuilder, private _occHabDataService: OccHabDataService) { this.searchForm = this._fb.group({ id_dataset: null, date_low: null, date_up: null, - habitat: null + habitat: null, }); } + nbOfFilter() { + let result = 0; + Object.keys(this.searchForm.value).forEach((key) => { + if (this.searchForm.value[key]) { + result = result + 1; + } + }); + return result; + } } diff --git a/contrib/gn_module_occhab/frontend/app/services/store.service.ts b/contrib/gn_module_occhab/frontend/app/services/store.service.ts index 2c1784914f..2d547a321c 100644 --- a/contrib/gn_module_occhab/frontend/app/services/store.service.ts +++ b/contrib/gn_module_occhab/frontend/app/services/store.service.ts @@ -1,8 +1,7 @@ -import { Injectable } from "@angular/core"; -import { DataFormService } from "@geonature_common/form/data-form.service"; -import { ModuleConfig } from "../module.config"; -import { OccHabDataService } from "./data.service"; -import { Observable, BehaviorSubject } from "rxjs"; +import { Injectable } from '@angular/core'; +import { DataFormService } from '@geonature_common/form/data-form.service'; +import { Observable, BehaviorSubject } from 'rxjs'; +import { ConfigService } from '@geonature/services/config.service'; @Injectable() export class OcchabStoreService { @@ -12,42 +11,31 @@ export class OcchabStoreService { public firstMessageMapList = true; /** Current list of id_station in the map list */ public idsStation: Array; - private _defaultNomenclature$: BehaviorSubject = new BehaviorSubject( - null - ); - public defaultNomenclature$: Observable< - any - > = this._defaultNomenclature$.asObservable(); - constructor( - private _gnDataService: DataFormService, - private _occHabDataService: OccHabDataService - ) { + private _defaultNomenclature$: BehaviorSubject = new BehaviorSubject(null); + public defaultNomenclature$: Observable = this._defaultNomenclature$.asObservable(); + constructor(private _gnDataService: DataFormService, public config: ConfigService) { this._gnDataService .getNomenclatures([ - "METHOD_CALCUL_SURFACE", - "DETERMINATION_TYP_HAB", - "TECHNIQUE_COLLECT_HAB", - "HAB_INTERET_COM", - "EXPOSITION", - "NAT_OBJ_GEO", - "HAB_INTERET_COM", - "ABONDANCE_HAB" + 'METHOD_CALCUL_SURFACE', + 'DETERMINATION_TYP_HAB', + 'TECHNIQUE_COLLECT_HAB', + 'HAB_INTERET_COM', + 'EXPOSITION', + 'NAT_OBJ_GEO', + 'HAB_INTERET_COM', + 'ABONDANCE_HAB', ]) - .subscribe(data => { - data.forEach(element => { + .subscribe((data) => { + data.forEach((element) => { this.nomenclatureItems[element.mnemonique] = element.values; }); }); - this._gnDataService - 
.getTypologyHabitat(ModuleConfig.ID_LIST_HABITAT) - .subscribe(data => { - this.typoHabitat = data; - }); - this._gnDataService - .getDefaultNomenclatureValue("occhab") - .subscribe(data => { - this._defaultNomenclature$.next(data); - }); + this._gnDataService.getTypologyHabitat(this.config.OCCHAB.ID_LIST_HABITAT).subscribe((data) => { + this.typoHabitat = data; + }); + this._gnDataService.getDefaultNomenclatureValue('occhab').subscribe((data) => { + this._defaultNomenclature$.next(data); + }); } get defaultNomenclature() { diff --git a/contrib/gn_module_occhab/frontend/package-lock.json b/contrib/gn_module_occhab/frontend/package-lock.json new file mode 100644 index 0000000000..fd57fd6367 --- /dev/null +++ b/contrib/gn_module_occhab/frontend/package-lock.json @@ -0,0 +1,30 @@ +{ + "name": "gn_module_occhab", + "version": "1.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "gn_module_occhab", + "version": "1.0.0", + "dependencies": { + "moment": "^2.29.4" + } + }, + "node_modules/moment": { + "version": "2.29.4", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.4.tgz", + "integrity": "sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w==", + "engines": { + "node": "*" + } + } + }, + "dependencies": { + "moment": { + "version": "2.29.4", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.4.tgz", + "integrity": "sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w==" + } + } +} diff --git a/contrib/gn_module_occhab/frontend/package.json b/contrib/gn_module_occhab/frontend/package.json new file mode 100644 index 0000000000..2dedcbfbc0 --- /dev/null +++ b/contrib/gn_module_occhab/frontend/package.json @@ -0,0 +1,9 @@ +{ + "name": "gn_module_occhab", + "version": "1.0.0", + "licence": "GPL", + "private": true, + "dependencies": { + "moment": "^2.29.4" + } +} diff --git a/contrib/gn_module_occhab/config/conf_gn_module.toml.example b/contrib/gn_module_occhab/occhab_config.toml.example similarity index 100% rename from contrib/gn_module_occhab/config/conf_gn_module.toml.example rename to contrib/gn_module_occhab/occhab_config.toml.example diff --git a/contrib/gn_module_validation/backend/gn_module_validation/blueprint.py b/contrib/gn_module_validation/backend/gn_module_validation/blueprint.py index b385d2fa84..0bbde8d737 100644 --- a/contrib/gn_module_validation/backend/gn_module_validation/blueprint.py +++ b/contrib/gn_module_validation/backend/gn_module_validation/blueprint.py @@ -2,8 +2,9 @@ import datetime import json -from flask import Blueprint, request, jsonify, current_app +from flask import Blueprint, request, jsonify, current_app, g from flask.json import jsonify +from werkzeug.exceptions import Forbidden import sqlalchemy as sa from sqlalchemy.orm import aliased, contains_eager, selectinload from marshmallow import ValidationError @@ -28,8 +29,8 @@ @blueprint.route("", methods=["GET", "POST"]) -@permissions.check_cruved_scope("R", True, module_code="VALIDATION") -def get_synthese_data(info_role): +@permissions.check_cruved_scope("C", get_scope=True, module_code="VALIDATION") +def get_synthese_data(scope): """ Return synthese and t_validations data filtered by form params Params must have same synthese fields names @@ -38,8 +39,6 @@ def get_synthese_data(info_role): Parameters: ------------ - info_role (User): - Information about the user asking the route. 
Auto add with kwargs Returns ------- @@ -163,7 +162,7 @@ def get_synthese_data(info_role): query = ( SyntheseQuery(Synthese, query.selectable, filters, query_joins=query.selectable.froms[0]) - .filter_query_all_filters(info_role) + .filter_query_all_filters(g.current_user, scope) .limit(result_limit) ) @@ -195,8 +194,8 @@ def get_synthese_data(info_role): @blueprint.route("/statusNames", methods=["GET"]) -@permissions.check_cruved_scope("R", True, module_code="VALIDATION") -def get_statusNames(info_role): +@permissions.check_cruved_scope("C", module_code="VALIDATION") +def get_statusNames(): nomenclatures = ( TNomenclatures.query.join(BibNomenclaturesTypes) .filter(BibNomenclaturesTypes.mnemonique == "STATUT_VALID") @@ -214,8 +213,8 @@ def get_statusNames(info_role): @blueprint.route("/", methods=["POST"]) -@permissions.check_cruved_scope("C", True, module_code="VALIDATION") -def post_status(info_role, id_synthese): +@permissions.check_cruved_scope("C", get_scope=True, module_code="VALIDATION") +def post_status(scope, id_synthese): data = dict(request.get_json()) try: id_validation_status = data["statut"] @@ -234,10 +233,14 @@ def post_status(info_role, id_synthese): # t_validations.uuid_attached_row: synthese = Synthese.query.get_or_404(int(id)) + + if not synthese.has_instance_permission(scope): + raise Forbidden + uuid = synthese.unique_id_sinp # t_validations.id_validator: - id_validator = info_role.id_role + id_validator = g.current_user.id_role # t_validations.validation_date val_date = datetime.datetime.now() @@ -271,12 +274,15 @@ def post_status(info_role, id_synthese): @blueprint.route("/date/", methods=["GET"]) -def get_validation_date(uuid): +@permissions.check_cruved_scope("C", get_scope=True, module_code="VALIDATION") +def get_validation_date(scope, uuid): """ Retourne la date de validation pour l'observation uuid """ s = Synthese.query.filter_by(unique_id_sinp=uuid).lateraljoin_last_validation().first_or_404() + if not s.has_instance_permission(scope): + raise Forbidden if s.last_validation: return jsonify(str(s.last_validation.validation_date)) else: diff --git a/backend/geonature/core/gn_permissions/backoffice/__init__.py b/contrib/gn_module_validation/backend/gn_module_validation/migrations/__init__.py similarity index 100% rename from backend/geonature/core/gn_permissions/backoffice/__init__.py rename to contrib/gn_module_validation/backend/gn_module_validation/migrations/__init__.py diff --git a/contrib/gn_module_validation/backend/gn_module_validation/migrations/df93a68242ee_declare_permissions.py b/contrib/gn_module_validation/backend/gn_module_validation/migrations/df93a68242ee_declare_permissions.py new file mode 100644 index 0000000000..49f837c7b0 --- /dev/null +++ b/contrib/gn_module_validation/backend/gn_module_validation/migrations/df93a68242ee_declare_permissions.py @@ -0,0 +1,92 @@ +"""declare permissions + +Revision ID: df93a68242ee +Revises: 85efc9bb5a47 +Create Date: 2023-05-17 15:15:38.833529 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = "df93a68242ee" +down_revision = None +branch_labels = ("validation",) +depends_on = ("f051b88a57fd",) + + +def upgrade(): + op.execute( + """ + INSERT INTO + gn_permissions.t_permissions_available ( + id_module, + id_object, + id_action, + label, + scope_filter + ) + SELECT + m.id_module, + o.id_object, + a.id_action, + v.label, + v.scope_filter + FROM + ( + VALUES + ('VALIDATION', 'ALL', 'C', True, 'Valider les observations') + ) AS v (module_code, object_code, action_code, scope_filter, label) + JOIN + gn_commons.t_modules m ON m.module_code = v.module_code + JOIN + gn_permissions.t_objects o ON o.code_object = v.object_code + JOIN + gn_permissions.bib_actions a ON a.code_action = v.action_code + """ + ) + op.execute( + """ + WITH bad_permissions AS ( + SELECT + p.id_permission + FROM + gn_permissions.t_permissions p + JOIN gn_commons.t_modules m + USING (id_module) + WHERE + m.module_code = 'VALIDATION' + EXCEPT + SELECT + p.id_permission + FROM + gn_permissions.t_permissions p + JOIN gn_permissions.t_permissions_available pa ON + (p.id_module = pa.id_module + AND p.id_object = pa.id_object + AND p.id_action = pa.id_action) + ) + DELETE + FROM + gn_permissions.t_permissions p + USING bad_permissions bp + WHERE + bp.id_permission = p.id_permission; + """ + ) + + +def downgrade(): + op.execute( + """ + DELETE FROM + gn_permissions.t_permissions_available pa + USING + gn_commons.t_modules m + WHERE + pa.id_module = m.id_module + AND + module_code = 'VALIDATION' + """ + ) diff --git a/contrib/gn_module_validation/config/settings.ini.sample b/contrib/gn_module_validation/config/settings.ini.sample deleted file mode 100644 index 2c64f0de26..0000000000 --- a/contrib/gn_module_validation/config/settings.ini.sample +++ /dev/null @@ -1,31 +0,0 @@ -########################## -# Database configuration # -########################## - -# PostgreSQL host -db_host=localhost - -# PostgreSQL port -db_port=5432 - -# GeoNature database name -db_name=geonature2db - -# GeoNature database owner username -user_pg=geonatadmin - -# GeoNature database owner password -user_pg_pass=my_password - -# Insert sample data (ZP and grids on PnEcrins) -insert_sample_data=true - -########################### -# Data SRID configuration # -########################### - -# GeoNature local SRID -srid_local=2154 - -# World SRID (Don't change it) -srid_world=4326 diff --git a/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.html b/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.html index 05d8bd3d3c..561b83907d 100644 --- a/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.html +++ b/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.html @@ -26,7 +26,7 @@ - + {{def.mnemonique }} {{def.definition_default}} diff --git a/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.ts b/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.ts index f3579764c8..6f1027329d 100644 --- a/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.ts +++ b/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.ts @@ -1,39 +1,37 @@ -import { Component } from "@angular/core"; -import { 
diff --git a/contrib/gn_module_validation/config/settings.ini.sample b/contrib/gn_module_validation/config/settings.ini.sample
deleted file mode 100644
index 2c64f0de26..0000000000
--- a/contrib/gn_module_validation/config/settings.ini.sample
+++ /dev/null
@@ -1,31 +0,0 @@
-##########################
-# Database configuration #
-##########################
-
-# PostgreSQL host
-db_host=localhost
-
-# PostgreSQL port
-db_port=5432
-
-# GeoNature database name
-db_name=geonature2db
-
-# GeoNature database owner username
-user_pg=geonatadmin
-
-# GeoNature database owner password
-user_pg_pass=my_password
-
-# Insert sample data (ZP and grids on PnEcrins)
-insert_sample_data=true
-
-###########################
-# Data SRID configuration #
-###########################
-
-# GeoNature local SRID
-srid_local=2154
-
-# World SRID (Don't change it)
-srid_world=4326
diff --git a/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.html b/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.html
index 05d8bd3d3c..561b83907d 100644
--- a/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.html
+++ b/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.html
@@ -26,7 +26,7 @@
-
+
       {{def.mnemonique }}
       {{def.definition_default}}
diff --git a/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.ts b/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.ts
index f3579764c8..6f1027329d 100644
--- a/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.ts
+++ b/contrib/gn_module_validation/frontend/app/components/validation-definitions/validation-definitions.component.ts
@@ -1,39 +1,37 @@
-import { Component } from "@angular/core";
-import { MapListService } from "@geonature_common/map-list/map-list.service";
-import { ValidationDataService } from "../../services/data.service";
-import { CommonService } from "@geonature_common/service/common.service";
-
-import { ModuleConfig } from "../../module.config";
+import { Component } from '@angular/core';
+import { ValidationDataService } from '../../services/data.service';
+import { CommonService } from '@geonature_common/service/common.service';
+import { ConfigService } from '@geonature/services/config.service';

 @Component({
-  selector: "pnx-validation-definitions",
-  templateUrl: "validation-definitions.component.html",
-  styleUrls: ["./validation-definitions.component.scss"],
-  providers: []
+  selector: 'pnx-validation-definitions',
+  templateUrl: 'validation-definitions.component.html',
+  styleUrls: ['./validation-definitions.component.scss'],
+  providers: [],
 })
 export class ValidationDefinitionsComponent {
   public definitions;
   public showDefinitions: Boolean = false;
-  public VALIDATION_CONFIG = ModuleConfig;

   constructor(
     public searchService: ValidationDataService,
     private _commonService: CommonService,
+    public config: ConfigService
   ) {}

-  getDefinitions(param) {
+  getDefinitions() {
     this.showDefinitions = !this.showDefinitions;
     this.searchService.getStatusNames().subscribe(
-      result => {
+      (result) => {
         this.definitions = result;
       },
-      error => {
-        if (error.statusText === "Unknown Error") {
+      (error) => {
+        if (error.statusText === 'Unknown Error') {
           // show error message if no connexion
-          this._commonService.translateToaster("error", "ERROR: IMPOSSIBLE TO CONNECT TO SERVER");
+          this._commonService.translateToaster('error', 'ERROR: IMPOSSIBLE TO CONNECT TO SERVER');
         } else {
           // show error message if other server error
-          this._commonService.translateToaster("error", error.error);
+          this._commonService.translateToaster('error', error.error);
         }
       }
     );
diff --git a/contrib/gn_module_validation/frontend/app/components/validation-modal-info-obs/validation-modal-info-obs.component.html b/contrib/gn_module_validation/frontend/app/components/validation-modal-info-obs/validation-modal-info-obs.component.html
index 17ca5179a7..866bf086f9 100644
--- a/contrib/gn_module_validation/frontend/app/components/validation-modal-info-obs/validation-modal-info-obs.component.html
+++ b/contrib/gn_module_validation/frontend/app/components/validation-modal-info-obs/validation-modal-info-obs.component.html
@@ -32,7 +32,7 @@
    >