diff --git a/backend/gn_module_monitoring/migrations/6673266fb79c_remove_id_module_from_sites_complements.py b/backend/gn_module_monitoring/migrations/6673266fb79c_remove_id_module_from_sites_complements.py
new file mode 100644
index 000000000..ca6d04879
--- /dev/null
+++ b/backend/gn_module_monitoring/migrations/6673266fb79c_remove_id_module_from_sites_complements.py
@@ -0,0 +1,54 @@
+"""remove_id_module_from_sites_complements
+
+Revision ID: 6673266fb79c
+Revises:
+Create Date: 2022-12-13 16:00:00.512562
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+from gn_module_monitoring import MODULE_CODE
+
+# revision identifiers, used by Alembic.
+revision = "6673266fb79c"
+down_revision = "e64bafb13ce8"
+branch_labels = None
+depends_on = None
+
+monitorings_schema = "gn_monitoring"
+
+
+def upgrade():
+    op.drop_column("t_site_complements", "id_module", schema=monitorings_schema)
+
+
+def downgrade():
+    op.add_column(
+        "t_site_complements",
+        sa.Column(
+            "id_module",
+            sa.Integer(),
+            sa.ForeignKey(
+                f"gn_commons.t_modules.id_module",
+                name="fk_t_site_complements_id_module",
+                ondelete="CASCADE",
+                onupdate="CASCADE",
+            ),
+            nullable=True,
+        ),
+        schema=monitorings_schema,
+    )
+    # Cannot use orm here because need the model to be "downgraded" as well
+    # Need to set nullable True above for existing rows
+    # FIXME: find a better way because need to assign a module...
+    statement = sa.text(
+        f"""
+        update {monitorings_schema}.t_site_complements
+        set id_module = (select id_module
+                         from gn_commons.t_modules tm
+                         where module_code = :module_code);
+        """
+    ).bindparams(module_code=MODULE_CODE)
+    op.execute(statement)
+    op.alter_column("t_site_complements", "id_module", nullable=False, schema=monitorings_schema)
diff --git a/backend/gn_module_monitoring/monitoring/models.py b/backend/gn_module_monitoring/monitoring/models.py
index 5559eb782..b561c6657 100644
--- a/backend/gn_module_monitoring/monitoring/models.py
+++ b/backend/gn_module_monitoring/monitoring/models.py
@@ -179,7 +179,7 @@ class TMonitoringVisits(TBaseVisits):
     )
 
 
-@geoserializable
+@geoserializable(geoCol="geom", idCol="id_base_site")
 class TMonitoringSites(TBaseSites):
     __tablename__ = 't_site_complements'
 
@@ -194,10 +194,6 @@
         primary_key=True
     )
 
-    id_module = DB.Column(
-        DB.ForeignKey('gn_commons.t_modules.id_module'),
-        nullable=False,
-    )
 
     id_sites_group = DB.Column(
         DB.ForeignKey('gn_monitoring.t_sites_groups.id_sites_group',
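Note on the model change above: the parametrized @geoserializable(geoCol="geom", idCol="id_base_site") decorator is what lets the new MonitoringSitesSchema (next file) call as_geofeature() with no arguments. A minimal sketch of the expected behaviour, assuming an application context and at least one persisted TMonitoringSites row; nothing here is part of the patch itself:

# Sketch only: requires the GeoNature app context and an existing site in the database.
from gn_module_monitoring.monitoring.models import TMonitoringSites

site = TMonitoringSites.query.first()
feature = site.as_geofeature()
# Expected shape (roughly): a GeoJSON-like Feature dict,
# {"type": "Feature", "id": <id_base_site>, "geometry": {...}, "properties": {...}}
geometry = feature.get("geometry")  # the part MonitoringSitesSchema.serialize_geojson re-serializes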
diff --git a/backend/gn_module_monitoring/monitoring/schemas.py b/backend/gn_module_monitoring/monitoring/schemas.py
new file mode 100644
index 000000000..2721de0e8
--- /dev/null
+++ b/backend/gn_module_monitoring/monitoring/schemas.py
@@ -0,0 +1,57 @@
+import json
+
+import geojson
+from marshmallow import Schema, fields
+from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
+from pypnnomenclature.schemas import NomenclatureSchema
+
+from gn_module_monitoring.monitoring.models import (
+    BibCategorieSite,
+    TMonitoringSites,
+    TMonitoringSitesGroups,
+)
+
+
+def paginate_schema(schema):
+    class PaginationSchema(Schema):
+        count = fields.Integer()
+        limit = fields.Integer()
+        offset = fields.Integer()
+        items = fields.Nested(schema, many=True, dump_only=True)
+
+    return PaginationSchema
+
+
+class MonitoringSitesGroupsSchema(SQLAlchemyAutoSchema):
+    class Meta:
+        model = TMonitoringSitesGroups
+        exclude = ("geom_geojson",)
+
+    geometry = fields.Method("serialize_geojson", dump_only=True)
+
+    def serialize_geojson(self, obj):
+        if obj.geom_geojson is not None:
+            return json.loads(obj.geom_geojson)
+
+
+class MonitoringSitesSchema(SQLAlchemyAutoSchema):
+    class Meta:
+        model = TMonitoringSites
+        exclude = ("geom_geojson", "geom")
+
+    geometry = fields.Method("serialize_geojson", dump_only=True)
+
+    def serialize_geojson(self, obj):
+        if obj.geom is not None:
+            return geojson.dumps(obj.as_geofeature().get("geometry"))
+
+
+class BibCategorieSiteSchema(SQLAlchemyAutoSchema):
+    site_type = fields.Nested(
+        NomenclatureSchema(only=("id_nomenclature", "label_fr")), many=True, dump_only=True
+    )
+
+    class Meta:
+        model = BibCategorieSite
+        include_fk = True
+        load_instance = True
diff --git a/backend/gn_module_monitoring/routes/site.py b/backend/gn_module_monitoring/routes/site.py
index b55d2a07e..cb10c21ca 100644
--- a/backend/gn_module_monitoring/routes/site.py
+++ b/backend/gn_module_monitoring/routes/site.py
@@ -5,6 +5,7 @@
 
 from gn_module_monitoring.blueprint import blueprint
 from gn_module_monitoring.monitoring.models import BibCategorieSite
+from gn_module_monitoring.monitoring.schemas import MonitoringSitesSchema, BibCategorieSiteSchema
 from gn_module_monitoring.utils.routes import filter_params, get_limit_offset, paginate
 
 
@@ -16,15 +17,20 @@ def get_categories():
     query = filter_params(query=BibCategorieSite.query, params=params)
     query = query.order_by(BibCategorieSite.id_categorie)
 
-    return paginate(query=query, object_name="categories", limit=limit, page=page, depth=1)
+    return paginate(
+        query=query,
+        schema=BibCategorieSiteSchema,
+        limit=limit,
+        page=page,
+    )
 
 
 @blueprint.route("/sites/categories/<int:id_categorie>", methods=["GET"])
 def get_categories_by_id(id_categorie):
     query = BibCategorieSite.query.filter_by(id_categorie=id_categorie)
     res = query.first()
-
-    return jsonify(res.as_dict())
+    schema = BibCategorieSiteSchema()
+    return schema.dump(res)
 
 
 @blueprint.route("/sites", methods=["GET"])
@@ -36,7 +42,12 @@ def get_sites():
         BibCategorieSite, TBaseSites.id_categorie == BibCategorieSite.id_categorie
     )
     query = filter_params(query=query, params=params)
-    return paginate(query=query, object_name="sites", limit=limit, page=page)
+    return paginate(
+        query=query,
+        schema=MonitoringSitesSchema,
+        limit=limit,
+        page=page,
+    )
 
 
 @blueprint.route("/sites/module/<string:module_code>", methods=["GET"])
diff --git a/backend/gn_module_monitoring/routes/sites_groups.py b/backend/gn_module_monitoring/routes/sites_groups.py
index bb3835e5d..7ea5609df 100644
--- a/backend/gn_module_monitoring/routes/sites_groups.py
+++ b/backend/gn_module_monitoring/routes/sites_groups.py
@@ -3,8 +3,8 @@
 
 from gn_module_monitoring.blueprint import blueprint
 from gn_module_monitoring.monitoring.models import TMonitoringSitesGroups
-from gn_module_monitoring.utils.routes import (filter_params, get_limit_offset,
-                                               paginate)
+from gn_module_monitoring.monitoring.schemas import MonitoringSitesGroupsSchema
+from gn_module_monitoring.utils.routes import filter_params, get_limit_offset, paginate
 
 
 @blueprint.route("/sites_groups", methods=["GET"])
@@ -15,4 +15,9 @@ def get_sites_groups():
     query = filter_params(query=TMonitoringSitesGroups.query, params=params)
     query = query.order_by(TMonitoringSitesGroups.id_sites_group)
 
-    return paginate(query=query, object_name="sites_groups", limit=limit, page=page)
+    return paginate(
+        query=query,
+        schema=MonitoringSitesGroupsSchema,
+        limit=limit,
+        page=page,
+    )
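For reference, a minimal sketch of how paginate_schema() from the new schemas module composes with an item schema, and the payload shape the routes above now return; the variables groups and total are hypothetical placeholders, not names from this patch:

# Sketch only: groups/total stand in for real query results.
from gn_module_monitoring.monitoring.schemas import (
    MonitoringSitesGroupsSchema,
    paginate_schema,
)

groups, total = [], 0  # hypothetical: a list of TMonitoringSitesGroups rows and their count

# paginate_schema() returns a Schema class whose "items" field nests the given schema.
PaginatedGroups = paginate_schema(MonitoringSitesGroupsSchema)
payload = PaginatedGroups().dump(
    {"items": groups, "count": total, "limit": 50, "offset": 0}
)
# payload -> {"count": 0, "limit": 50, "offset": 0, "items": [...]}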
diff --git a/backend/gn_module_monitoring/tests/fixtures/site.py b/backend/gn_module_monitoring/tests/fixtures/site.py
index 4325c0bf5..415ffe321 100644
--- a/backend/gn_module_monitoring/tests/fixtures/site.py
+++ b/backend/gn_module_monitoring/tests/fixtures/site.py
@@ -1,18 +1,18 @@
 import pytest
 from geoalchemy2.shape import from_shape
-from geonature.core.gn_monitoring.models import TBaseSites
 from geonature.utils.env import db
 from pypnnomenclature.models import BibNomenclaturesTypes, TNomenclatures
 from shapely.geometry import Point
 
-from gn_module_monitoring.monitoring.models import BibCategorieSite
+from gn_module_monitoring.monitoring.models import BibCategorieSite, TMonitoringSites
+from gn_module_monitoring.tests.fixtures.sites_groups import sites_groups
 
 
 @pytest.fixture()
 def site_type():
     return TNomenclatures.query.filter(
-        BibNomenclaturesTypes.mnemonique == "TYPE_SITE", TNomenclatures.mnemonique == "Grotte"
-    ).one()
+        BibNomenclaturesTypes.mnemonique == "TYPE_SITE", TNomenclatures.mnemonique == "Grotte"
+    ).one()
 
 
 @pytest.fixture()
@@ -31,7 +31,7 @@ def categories(site_type):
 
 
 @pytest.fixture()
-def sites(users, categories):
+def sites(users, categories, sites_groups):
     user = users["user"]
     geom_4326 = from_shape(Point(43, 24), srid=4326)
     sites = {}
@@ -40,7 +40,7 @@ def sites(users, categories):
         BibNomenclaturesTypes.mnemonique == "TYPE_SITE", TNomenclatures.mnemonique == "Grotte"
     ).one()
     for i, key in enumerate(categories.keys()):
-        sites[key] = TBaseSites(
+        sites[key] = TMonitoringSites(
             id_inventor=user.id_role,
             id_digitiser=user.id_role,
             base_site_name=f"Site{i}",
@@ -49,6 +49,7 @@ def sites(users, categories):
             geom=geom_4326,
             id_nomenclature_type_site=site_type.id_nomenclature,
             id_categorie=categories[key].id_categorie,
+            id_sites_group=sites_groups["Site_Groupe"].id_sites_group,
         )
     with db.session.begin_nested():
         db.session.add_all(sites.values())
diff --git a/backend/gn_module_monitoring/tests/test_routes/test_site.py b/backend/gn_module_monitoring/tests/test_routes/test_site.py
index 04dc22995..6bddc90ca 100644
--- a/backend/gn_module_monitoring/tests/test_routes/test_site.py
+++ b/backend/gn_module_monitoring/tests/test_routes/test_site.py
@@ -1,6 +1,8 @@
 import pytest
 from flask import url_for
+
+from gn_module_monitoring.monitoring.schemas import BibCategorieSiteSchema, MonitoringSitesSchema
 
 
 @pytest.mark.usefixtures("client_class", "temporary_transaction")
 class TestSite:
@@ -15,22 +17,28 @@ def test_get_categories_by_id(self, categories):
         assert r.json["label"] == cat.label
 
     def test_get_categories(self, categories):
+        schema = BibCategorieSiteSchema()
+
         r = self.client.get(url_for("monitorings.get_categories"))
 
         assert r.json["count"] >= len(categories)
-        assert all([cat.as_dict(depth=1) in r.json["categories"] for cat in categories.values()])
+        assert all(
+            [schema.dump(cat) in r.json["items"] for cat in categories.values()]
+        )
 
     def test_get_categories_label(self, categories):
         label = list(categories.keys())[0]
-
+        schema = BibCategorieSiteSchema()
         r = self.client.get(url_for("monitorings.get_categories"), query_string={"label": label})
-        assert categories[label].as_dict(depth=1) in r.json["categories"]
+        assert schema.dump(categories[label]) in r.json["items"]
 
     def test_get_sites(self, sites):
+        schema = MonitoringSitesSchema()
+
         r = self.client.get(url_for("monitorings.get_sites"))
 
         assert r.json["count"] >= len(sites)
-        assert any([site.as_dict() in r.json["sites"] for site in sites.values()])
+        assert any([schema.dump(site) in r.json["items"] for site in sites.values()])
 
     def test_get_module_sites(self):
         module_code = "TEST"
diff --git a/backend/gn_module_monitoring/tests/test_routes/test_sites_groups.py b/backend/gn_module_monitoring/tests/test_routes/test_sites_groups.py
index 41701d18c..a81abf66d 100644
--- a/backend/gn_module_monitoring/tests/test_routes/test_sites_groups.py
+++ b/backend/gn_module_monitoring/tests/test_routes/test_sites_groups.py
@@ -1,6 +1,9 @@
 import pytest
 from flask import url_for
+
+from gn_module_monitoring.monitoring.models import TMonitoringSitesGroups
+from gn_module_monitoring.monitoring.schemas import MonitoringSitesGroupsSchema
 
 
 @pytest.mark.usefixtures("client_class", "temporary_transaction")
 class TestSitesGroups:
@@ -8,16 +11,31 @@ def test_get_sites_groups(self, sites_groups):
         r = self.client.get(url_for("monitorings.get_sites_groups"))
 
         assert r.json["count"] >= len(sites_groups)
-        assert all([group.as_dict() in r.json["sites_groups"] for group in sites_groups.values()])
+        assert all(
+            [
+                MonitoringSitesGroupsSchema().dump(group) in r.json["items"]
+                for group in sites_groups.values()
+            ]
+        )
 
     def test_get_sites_groups_filter_name(self, sites_groups):
         name, name_not_present = list(sites_groups.keys())
+        schema = MonitoringSitesGroupsSchema()
         r = self.client.get(
             url_for("monitorings.get_sites_groups"), query_string={"sites_group_name": name}
         )
 
         assert r.json["count"] >= 1
-        json_sites_groups = r.json["sites_groups"]
-        assert sites_groups[name].as_dict() in json_sites_groups
-        assert sites_groups[name_not_present].as_dict() not in json_sites_groups
+        json_sites_groups = r.json["items"]
+        assert schema.dump(sites_groups[name]) in json_sites_groups
+        assert schema.dump(sites_groups[name_not_present]) not in json_sites_groups
+
+    def test_serialize_sites_groups(self, sites_groups, sites):
+        groups = TMonitoringSitesGroups.query.filter(
+            TMonitoringSitesGroups.id_sites_group.in_(
+                [s.id_sites_group for s in sites_groups.values()]
+            )
+        ).all()
+        schema = MonitoringSitesGroupsSchema()
+        assert [schema.dump(site) for site in groups]
diff --git a/backend/gn_module_monitoring/utils/routes.py b/backend/gn_module_monitoring/utils/routes.py
index b6c16a1d7..5a75c1c4b 100644
--- a/backend/gn_module_monitoring/utils/routes.py
+++ b/backend/gn_module_monitoring/utils/routes.py
@@ -2,22 +2,23 @@
 
 from flask import Response
 from flask.json import jsonify
+from marshmallow import Schema
 from sqlalchemy.orm import Query
 from werkzeug.datastructures import MultiDict
 
+from gn_module_monitoring.monitoring.schemas import paginate_schema
+
 
 def get_limit_offset(params: MultiDict) -> Tuple[int]:
     return params.pop("limit", 50), params.pop("offset", 1)
 
 
-def paginate(query: Query, object_name: str, limit: int, page: int, depth: int = 0) -> Response:
+def paginate(query: Query, schema: Schema, limit: int, page: int) -> Response:
     result = query.paginate(page=page, error_out=False, max_per_page=limit)
-    data = {
-        object_name: [res.as_dict(depth=depth) for res in result.items],
-        "count": result.total,
-        "limit": limit,
-        "offset": page - 1,
-    }
+    pagination_schema = paginate_schema(schema)
+    data = pagination_schema().dump(
+        dict(items=result.items, count=result.total, limit=limit, offset=page - 1)
+    )
 
     return jsonify(data)
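Finally, a minimal end-to-end usage sketch of the reworked paginate() helper, mirroring how the routes above now call it. The route name and the params handling are illustrative only (the patch does not add this route), and a Flask request context is assumed:

# Sketch only: a hypothetical route wired onto the existing blueprint.
from flask import request
from werkzeug.datastructures import MultiDict

from gn_module_monitoring.blueprint import blueprint
from gn_module_monitoring.monitoring.models import TMonitoringSitesGroups
from gn_module_monitoring.monitoring.schemas import MonitoringSitesGroupsSchema
from gn_module_monitoring.utils.routes import get_limit_offset, paginate


@blueprint.route("/example_sites_groups", methods=["GET"])  # hypothetical route
def example_sites_groups():
    params = MultiDict(request.args)  # mutable copy, since get_limit_offset() pops keys
    limit, page = get_limit_offset(params=params)
    query = TMonitoringSitesGroups.query.order_by(TMonitoringSitesGroups.id_sites_group)
    # paginate() now serializes through the marshmallow schema instead of as_dict()
    return paginate(query=query, schema=MonitoringSitesGroupsSchema, limit=limit, page=page)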