From 38aea2fbd94d87edae02f7dcb640a316e57fc8bd Mon Sep 17 00:00:00 2001 From: 20cents Date: Mon, 16 Dec 2024 14:41:31 +0100 Subject: [PATCH] Feat (import): Support constant values in the mapping (#3271) * feat(import): can post fieldmapping in new structured format * fix(import): insert_import_data_in_transient_table * fix(import): get_import_values * fix(import): prepare_import * feat(import): support default values * feat(import): default value edition support nomenclature widget refactor(import): minor refact on getFieldMappingValues() * feat(import): improve default value json edition * feat(import): present default values on report * feat(import): db migration * feat(import): test default value * test(import): support fieldmapping format v2 * feat(import): dynamic form on default values * fix(import): no longer send empty string for date default value * fix(import): test field jsonschema_definitions * feat(import): fix occhab revisions * feat(import): rename obsolete revision * style(import): code format * feat(import): fieldmapping validators deal with default values * feat(import): occhab revision to set bib_fields.type_field * Fix (migration) move migration to geonature branch --------- Co-authored-by: Pierre-Narcisi --- .../core/gn_synthese/imports/actions.py | 20 +- .../core/imports/checks/dataframe/utils.py | 6 +- .../geonature/core/imports/checks/sql/core.py | 16 +- .../core/imports/checks/sql/utils.py | 2 +- backend/geonature/core/imports/models.py | 27 ++- .../geonature/core/imports/routes/fields.py | 3 + .../geonature/core/imports/routes/imports.py | 7 +- backend/geonature/core/imports/utils.py | 53 ++++-- ...ab56_type_field_conforms_to_type_widget.py | 59 ++++++ ...43b01a18850_fieldmapping_default_values.py | 55 ++++++ .../tests/imports/jsonschema_definitions.py | 11 +- .../tests/imports/test_imports_occhab.py | 2 +- .../tests/imports/test_imports_synthese.py | 22 ++- .../geonature/tests/imports/test_mappings.py | 28 +-- .../gn_module_occhab/imports/actions.py | 10 +- .../650f1d749b3b_add_default_mapping.py | 175 ++++++++++++++++++ .../aed662bbd88a_add_default_mapping.py | 145 --------------- .../migrations/aed662bbd88a_do_nothing.py | 21 +++ ...bib_type_field_conforms_to_dynamic_form.py | 98 ++++++++++ .../dynamic-form/dynamic-form.component.html | 1 + .../app/GN2CommonModule/form/form.service.ts | 40 ++-- .../form/taxonomy/taxonomy.component.ts | 3 +- .../fields-mapping-step.component.html | 2 +- .../fields-mapping-step.component.ts | 31 +++- .../mapping-theme.component.html | 6 + .../mapping-theme/mapping-theme.component.ts | 8 +- .../import_report.component.html | 2 + .../import_report/import_report.component.ts | 4 +- .../modules/imports/models/mapping.model.ts | 16 +- .../mappings/field-mapping.service.ts | 174 +++++++++++++---- 30 files changed, 780 insertions(+), 267 deletions(-) create mode 100644 backend/geonature/migrations/versions/imports/a94bea44ab56_type_field_conforms_to_type_widget.py create mode 100644 backend/geonature/migrations/versions/imports/e43b01a18850_fieldmapping_default_values.py create mode 100644 contrib/gn_module_occhab/backend/gn_module_occhab/migrations/650f1d749b3b_add_default_mapping.py delete mode 100644 contrib/gn_module_occhab/backend/gn_module_occhab/migrations/aed662bbd88a_add_default_mapping.py create mode 100644 contrib/gn_module_occhab/backend/gn_module_occhab/migrations/aed662bbd88a_do_nothing.py create mode 100644 contrib/gn_module_occhab/backend/gn_module_occhab/migrations/e43f039b5ff1_bib_type_field_conforms_to_dynamic_form.py 
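For reviewers: the core change of this patch is the shape of the values stored in gn_imports.t_fieldmappings.values and gn_imports.t_imports.fieldmapping. Previously each destination field was mapped directly to a source column name (a list of columns for multi fields, a boolean for autogenerated fields); after the e43b01a18850 migration each destination field maps to an object with an optional "column_src" and an optional "default_value", at least one of the two being required, as enforced by the JSON schema added in backend/geonature/core/imports/models.py. A minimal before/after sketch — the field and column names below are illustrative only, not taken from a real mapping:

    # Old, flat format: destination field -> source column / list of columns / boolean
    fieldmapping_v1 = {
        "date_min": "date_debut",                     # single-column field
        "additional_data": ["comment", "remarques"],  # multi field: list of source columns
        "unique_id_sinp_generate": True,              # autogenerated field: boolean
    }

    # New, structured format written by the e43b01a18850 migration and accepted by
    # FieldMapping.validate_values(): each entry carries "column_src" and/or "default_value".
    fieldmapping_v2 = {
        "date_min": {"column_src": "date_debut"},
        "additional_data": {"column_src": ["comment", "remarques"]},
        "unique_id_sinp_generate": {"column_src": True},
        # constant value used when the column is absent from the file or a cell is empty
        "count_max": {"column_src": "nombre_max", "default_value": 5},
    }

The "default_value" is consumed by preprocess_value() (see backend/geonature/core/imports/utils.py below) when the mapped column is missing from the uploaded file or when a cell is empty, and it is also taken into account by the field-mapping validators, the import report, and the new frontend default-value widgets.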
diff --git a/backend/geonature/core/gn_synthese/imports/actions.py b/backend/geonature/core/gn_synthese/imports/actions.py index fc501b1d53..6b8b7d356f 100644 --- a/backend/geonature/core/gn_synthese/imports/actions.py +++ b/backend/geonature/core/gn_synthese/imports/actions.py @@ -88,7 +88,7 @@ def check_transient_data(task, logger, imprt: TImports): selected_fields = { field_name: fields[field_name] for field_name, source_field in imprt.fieldmapping.items() - if source_field in imprt.columns + if source_field.get("column_src", None) in imprt.columns } init_rows_validity(imprt) task.update_state(state="PROGRESS", meta={"progress": 0.05}) @@ -218,7 +218,15 @@ def update_batch_progress(batch, step): do_nomenclatures_mapping( imprt, entity, - selected_fields, + { + field_name: fields[field_name] + for field_name, mapping in imprt.fieldmapping.items() + if field_name in fields + and ( + mapping.get("column_src", None) in imprt.columns + or mapping.get("default_value") is not None + ) + }, fill_with_defaults=current_app.config["IMPORT"][ "FILL_MISSING_NOMENCLATURE_WITH_DEFAULT_VALUE" ], @@ -339,11 +347,15 @@ def import_data_to_destination(imprt: TImports) -> None: if field_name not in fields: # not a destination field continue field = fields[field_name] + column_src = source_field.get("column_src", None) if field.multi: - if not set(source_field).isdisjoint(imprt.columns): + if not set(column_src).isdisjoint(imprt.columns): insert_fields |= {field} else: - if source_field in imprt.columns: + if ( + column_src in imprt.columns + or source_field.get("default_value", None) is not None + ): insert_fields |= {field} insert_fields -= {fields["unique_dataset_id"]} # Column only used for filling `id_dataset` diff --git a/backend/geonature/core/imports/checks/dataframe/utils.py b/backend/geonature/core/imports/checks/dataframe/utils.py index 4257442559..6dff9ff6d1 100644 --- a/backend/geonature/core/imports/checks/dataframe/utils.py +++ b/backend/geonature/core/imports/checks/dataframe/utils.py @@ -7,7 +7,7 @@ from geonature.utils.env import db -from geonature.core.imports.models import ImportUserError, ImportUserErrorType +from geonature.core.imports.models import ImportUserError, ImportUserErrorType, TImports from geonature.core.imports.utils import generated_fields @@ -101,7 +101,7 @@ def __error_replace(*args, **kwargs): return _error_replace -def report_error(imprt, entity, df, error): +def report_error(imprt: TImports, entity, df, error): """ Reports an error found in the dataframe, updates the validity column and insert the error in the `t_user_errors` table. @@ -147,7 +147,7 @@ def report_error(imprt, entity, df, error): # f'{error_type.name}' # FIXME comment ordered_invalid_rows = sorted(invalid_rows["line_no"]) column = generated_fields.get(error["column"], error["column"]) - column = imprt.fieldmapping.get(column, column) + column = imprt.fieldmapping.get(column, {}).get("column_src", column) # If an error for same import, same column and of the same type already exists, # we concat existing erroneous rows with current rows. 
stmt = pg_insert(ImportUserError).values( diff --git a/backend/geonature/core/imports/checks/sql/core.py b/backend/geonature/core/imports/checks/sql/core.py index 7fa9077346..c2c62200c3 100644 --- a/backend/geonature/core/imports/checks/sql/core.py +++ b/backend/geonature/core/imports/checks/sql/core.py @@ -36,9 +36,10 @@ def init_rows_validity(imprt: TImports, dataset_name_field: str = "id_dataset"): # as rows with multi-entity field only will raise an ORPHAN_ROW error selected_fields_names = [] for field_name, source_field in imprt.fieldmapping.items(): - if type(source_field) == list: - selected_fields_names.extend(set(source_field) & set(imprt.columns)) - elif source_field in imprt.columns: + column_src = source_field.get("column_src", None) + if type(column_src) == list: + selected_fields_names.extend(set(column_src) & set(imprt.columns)) + elif column_src in imprt.columns: selected_fields_names.append(field_name) for entity in entities: # Select fields associated to this entity *and only to this entity* @@ -64,15 +65,16 @@ def init_rows_validity(imprt: TImports, dataset_name_field: str = "id_dataset"): ) -def check_orphan_rows(imprt): +def check_orphan_rows(imprt: TImports): transient_table = imprt.destination.get_transient_table() # TODO: handle multi-source fields # This is actually not a big issue as multi-source fields are unlikely to also be multi-entity fields. selected_fields_names = [] for field_name, source_field in imprt.fieldmapping.items(): - if type(source_field) == list: - selected_fields_names.extend(set(source_field) & set(imprt.columns)) - elif source_field in imprt.columns: + column_src = source_field.get("column_src", None) + if type(column_src) == list: + selected_fields_names.extend(set(column_src) & set(imprt.columns)) + elif column_src in imprt.columns: selected_fields_names.append(field_name) # Select fields associated to multiple entities AllEntityField = sa.orm.aliased(EntityField) diff --git a/backend/geonature/core/imports/checks/sql/utils.py b/backend/geonature/core/imports/checks/sql/utils.py index e07168b001..403cbca4f1 100644 --- a/backend/geonature/core/imports/checks/sql/utils.py +++ b/backend/geonature/core/imports/checks/sql/utils.py @@ -64,7 +64,7 @@ def report_erroneous_rows( transient_table = imprt.destination.get_transient_table() error_type = ImportUserErrorType.query.filter_by(name=error_type).one() error_column = generated_fields.get(error_column, error_column) - error_column = imprt.fieldmapping.get(error_column, error_column) + error_column = imprt.fieldmapping.get(error_column, {}).get("column_src", error_column) if error_type.level in level_validity_mapping: assert entity is not None cte = ( diff --git a/backend/geonature/core/imports/models.py b/backend/geonature/core/imports/models.py index 958b359a9d..e95ff8ac6e 100644 --- a/backend/geonature/core/imports/models.py +++ b/backend/geonature/core/imports/models.py @@ -479,6 +479,7 @@ class BibFields(db.Model): fr_label = db.Column(db.Unicode, nullable=False) eng_label = db.Column(db.Unicode, nullable=True) type_field = db.Column(db.Unicode, nullable=True) + type_field_params = db.Column(MutableDict.as_mutable(JSON)) mandatory = db.Column(db.Boolean, nullable=False) autogenerated = db.Column(db.Boolean, nullable=False) mnemonique = db.Column(db.Unicode, db.ForeignKey(BibNomenclaturesTypes.mnemonique)) @@ -608,7 +609,7 @@ def optional_conditions_to_jsonschema(name_field: str, optional_conditions: Iter "if": { "not": { "properties": { - field_opt: {"type": "string"} for field_opt in 
optional_conditions + field_opt: {"type": "object"} for field_opt in optional_conditions } } }, @@ -726,9 +727,27 @@ def validate_values(field_mapping_json): "type": "object", "properties": { field.name_field: { - "type": ( - "boolean" if field.autogenerated else ("array" if field.multi else "string") - ), + "type": "object", + "properties": { + "column_src": { + "type": ( + "boolean" + if field.autogenerated + else ("array" if field.multi else "string") + ), + }, + "default_value": { + "oneOf": [ + {"type": "boolean"}, + {"type": "number"}, + {"type": "string"}, + {"type": "array"}, + ] + }, + }, + "required": [], + "additionalProperties": False, + "anyOf": [{"required": ["column_src"]}, {"required": ["default_value"]}], } for field in fields }, diff --git a/backend/geonature/core/imports/routes/fields.py b/backend/geonature/core/imports/routes/fields.py index c1ad4b52fe..beaf003800 100644 --- a/backend/geonature/core/imports/routes/fields.py +++ b/backend/geonature/core/imports/routes/fields.py @@ -70,8 +70,11 @@ def get_fields(scope, destination): fields=[ "id_field", "name_field", + "type_field", + "type_field_params", "fr_label", "eng_label", + "mnemonique", "mandatory", "autogenerated", "multi", diff --git a/backend/geonature/core/imports/routes/imports.py b/backend/geonature/core/imports/routes/imports.py index 12284a1e3f..92b1dbb819 100644 --- a/backend/geonature/core/imports/routes/imports.py +++ b/backend/geonature/core/imports/routes/imports.py @@ -368,8 +368,11 @@ def get_import_values(scope, imprt): # this nomenclated field is not mapped continue source = imprt.fieldmapping[field.name_field] - if source not in imprt.columns: - # the file do not contain this field expected by the mapping + if ( + source.get("column_src", None) not in imprt.columns + and source.get("default_value", None) is None + ): + # the file do not contain this field expected by the mapping and there is no default value continue # TODO: vérifier que l’on a pas trop de valeurs différentes ? column = field.source_column diff --git a/backend/geonature/core/imports/utils.py b/backend/geonature/core/imports/utils.py index 780b110a48..63454d88cd 100644 --- a/backend/geonature/core/imports/utils.py +++ b/backend/geonature/core/imports/utils.py @@ -4,7 +4,7 @@ import json from enum import IntEnum from datetime import datetime, timedelta -from typing import IO, Any, Dict, Iterable, List, Optional, Set, Tuple +from typing import IO, Any, Dict, Iterable, List, Optional, Set, Tuple, Union from flask import current_app, render_template import sqlalchemy as sa @@ -163,7 +163,9 @@ def detect_separator(file_: IO, encoding: str) -> Optional[str]: return dialect.delimiter -def preprocess_value(dataframe: pd.DataFrame, field: BibFields, source_col: str) -> pd.Series: +def preprocess_value( + dataframe: pd.DataFrame, field: BibFields, source_col: Union[str, List[str]], default_value: Any +) -> pd.Series: """ Preprocesses values in a DataFrame depending if the field contains multiple values (e.g. additional_data) or not. 
@@ -184,8 +186,14 @@ def preprocess_value(dataframe: pd.DataFrame, field: BibFields, source_col: str) """ def build_additional_data(columns: dict): + try: + default_values = json.loads(default_value) + except Exception: + default_values = {} result = {} for key, value in columns.items(): + if value is None or value == "": + value = default_values.get(key, None) if value is None: continue try: @@ -198,9 +206,17 @@ def build_additional_data(columns: dict): if field.multi: assert type(source_col) is list + for col in source_col: + if col not in dataframe.columns: + dataframe[col] = None col = dataframe[source_col].apply(build_additional_data, axis=1) else: + if source_col not in dataframe.columns: + dataframe[source_col] = None col = dataframe[source_col] + if default_value is not None: + col = col.replace({"": default_value, None: default_value}) + return col @@ -244,8 +260,10 @@ def insert_import_data_in_transient_table(imprt: TImports) -> int: } data.update( { - dest_field: preprocess_value(chunk, source_field["field"], source_field["value"]) - for dest_field, source_field in fieldmapping.items() + dest_field: preprocess_value( + chunk, mapping["field"], mapping["column_src"], mapping["default_value"] + ) + for dest_field, mapping in fieldmapping.items() } ) # XXX keep extra_fields in t_imports_synthese? or add config argument? @@ -293,21 +311,25 @@ def build_fieldmapping( for field in fields: if field.name_field in imprt.fieldmapping: + mapping = imprt.fieldmapping[field.name_field] + column_src = mapping.get("column_src", None) + default_value = mapping.get("default_value", None) if field.multi: - correct = list(set(columns) & set(imprt.fieldmapping[field.name_field])) + correct = list(set(columns) & set(column_src)) if len(correct) > 0: fieldmapping[field.source_column] = { - "value": correct, "field": field, + "column_src": correct, + "default_value": default_value, } used_columns.extend(correct) else: - if imprt.fieldmapping[field.name_field] in columns: - fieldmapping[field.source_column] = { - "value": imprt.fieldmapping[field.name_field], - "field": field, - } - used_columns.append(imprt.fieldmapping[field.name_field]) + fieldmapping[field.source_column] = { + "field": field, + "column_src": column_src, + "default_value": default_value, + } + used_columns.append(column_src) return fieldmapping, used_columns @@ -442,8 +464,11 @@ def get_mapping_data(import_: TImports, entity: Entity): fields = {ef.field.name_field: ef.field for ef in entity.fields} selected_fields = { field_name: fields[field_name] - for field_name, source_field in import_.fieldmapping.items() - if source_field in import_.columns and field_name in fields + for field_name, mapping in import_.fieldmapping.items() + if ( + mapping.get("column_src") in import_.columns or mapping.get("default_value") is not None + ) + and field_name in fields } source_cols = set() for field in selected_fields.values(): diff --git a/backend/geonature/migrations/versions/imports/a94bea44ab56_type_field_conforms_to_type_widget.py b/backend/geonature/migrations/versions/imports/a94bea44ab56_type_field_conforms_to_type_widget.py new file mode 100644 index 0000000000..3e36f384c2 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/a94bea44ab56_type_field_conforms_to_type_widget.py @@ -0,0 +1,59 @@ +"""bib_field.type_field conforms to dynamic_form.type_widget + +Revision ID: a94bea44ab56 +Revises: e43b01a18850 +Create Date: 2024-12-11 15:44:52.912515 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision 
identifiers, used by Alembic. +revision = "a94bea44ab56" +down_revision = "e43b01a18850" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute( + """ + ALTER TABLE gn_imports.bib_fields ADD type_field_params jsonb NULL; + """ + ) + op.execute( + """ + UPDATE gn_imports.bib_fields + SET type_field = + case + -- mnemonique is handled front side + WHEN mnemonique IS NOT NULL AND mnemonique != '' THEN NULL + + -- multi is handled front side + WHEN multi = true THEN null + + WHEN type_field IN ('integer', 'real') THEN 'number' + + WHEN type_field IN ('geometry', 'jsonb', 'json', 'wkt') THEN 'textarea' + + WHEN type_field LIKE 'timestamp%' THEN 'date' + + WHEN type_field ~ '^character varying\((\d+)\)$' + AND COALESCE(substring(type_field FROM '\d+')::int, 0) > 68 THEN 'textarea' + + -- Default: garder la valeur actuelle. + ELSE NULL + END; + """ + ) + + +def downgrade(): + op.execute( + """ + ALTER TABLE gn_imports.bib_fields DROP COLUMN type_field_params; + + """ + ) diff --git a/backend/geonature/migrations/versions/imports/e43b01a18850_fieldmapping_default_values.py b/backend/geonature/migrations/versions/imports/e43b01a18850_fieldmapping_default_values.py new file mode 100644 index 0000000000..48397cbf46 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/e43b01a18850_fieldmapping_default_values.py @@ -0,0 +1,55 @@ +"""fieldmapping default values + +Revision ID: e43b01a18850 +Revises: 6734d8f7eb2a +Create Date: 2024-11-28 17:33:06.243150 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "e43b01a18850" +down_revision = "6734d8f7eb2a" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute( + """UPDATE gn_imports.t_fieldmappings + SET "values" = ( + SELECT json_object_agg(key, json_build_object('column_src', value)) + FROM json_each("values") + ) + WHERE "values" IS NOT NULL;""" + ) + op.execute( + """UPDATE gn_imports.t_imports + SET fieldmapping = ( + SELECT json_object_agg(key, json_build_object('column_src', value)) + FROM json_each(fieldmapping) + ) + WHERE fieldmapping IS NOT NULL;""" + ) + + +def downgrade(): + op.execute( + """UPDATE gn_imports.t_fieldmappings + SET "values" = ( + SELECT json_object_agg(key, value->'column_src') + FROM json_each("values") + ) + WHERE "values" IS NOT NULL;""" + ) + op.execute( + """UPDATE gn_imports.t_imports + SET fieldmapping = ( + SELECT json_object_agg(key, value->'column_src') + FROM json_each(fieldmapping) + ) + WHERE fieldmapping IS NOT NULL;""" + ) diff --git a/backend/geonature/tests/imports/jsonschema_definitions.py b/backend/geonature/tests/imports/jsonschema_definitions.py index 42a94a4520..a968b52f32 100644 --- a/backend/geonature/tests/imports/jsonschema_definitions.py +++ b/backend/geonature/tests/imports/jsonschema_definitions.py @@ -36,7 +36,16 @@ ], }, "type_field": { - "type": "string", + "type": [ + "string", + "null", + ], + }, + "type_field_params": { + "type": [ + "object", + "null", + ], }, "synthese_field": { "type": "boolean", diff --git a/backend/geonature/tests/imports/test_imports_occhab.py b/backend/geonature/tests/imports/test_imports_occhab.py index b0fe44064f..4865cde719 100644 --- a/backend/geonature/tests/imports/test_imports_occhab.py +++ b/backend/geonature/tests/imports/test_imports_occhab.py @@ -60,7 +60,7 @@ def fieldmapping(occhab_destination): .unique() .all() ) - return {field.name_field: field.name_field for field in fields} + return {field.name_field: {"column_src": field.name_field} for 
field in fields} @pytest.fixture() diff --git a/backend/geonature/tests/imports/test_imports_synthese.py b/backend/geonature/tests/imports/test_imports_synthese.py index 21d6ea956b..b7308bd313 100644 --- a/backend/geonature/tests/imports/test_imports_synthese.py +++ b/backend/geonature/tests/imports/test_imports_synthese.py @@ -208,11 +208,15 @@ def fieldmapping(import_file_name, autogenerate): else: bib_fields = db.session.scalars(sa.select(BibFields).filter_by(display=True)).unique().all() return { - field.name_field: ( - autogenerate - if field.autogenerated - else ([field.name_field, "additional_data2"] if field.multi else field.name_field) - ) + field.name_field: { + "column_src": ( + autogenerate + if field.autogenerated + else ( + [field.name_field, "additional_data2"] if field.multi else field.name_field + ) + ) + } for field in bib_fields } @@ -927,16 +931,20 @@ def test_import_valid_file(self, users, datasets): .unique() .scalar_one() ) + fieldmapping_values = fieldmapping.values.copy() + fieldmapping_values.update( + {"count_max": fieldmapping_values.get("count_max", {}) | {"default_value": 5}} + ) r = self.client.post( url_for("import.set_import_field_mapping", import_id=imprt.id_import), - data=fieldmapping.values, + data=fieldmapping_values, ) assert r.status_code == 200, r.data validate_json( r.json, {"definitions": jsonschema_definitions, "$ref": "#/definitions/import"}, ) - assert r.json["fieldmapping"] == fieldmapping.values + assert r.json["fieldmapping"] == fieldmapping_values # Loading step r = self.client.post(url_for("import.load_import", import_id=imprt.id_import)) diff --git a/backend/geonature/tests/imports/test_mappings.py b/backend/geonature/tests/imports/test_mappings.py index 4988287198..38dbab98d7 100644 --- a/backend/geonature/tests/imports/test_mappings.py +++ b/backend/geonature/tests/imports/test_mappings.py @@ -37,11 +37,13 @@ def mappings(synthese_destination, users): .all() ) fieldmapping_values = { - field.name_field: ( - True - if field.autogenerated - else ([field.name_field] if field.multi else field.name_field) - ) + field.name_field: { + "column_src": ( + True + if field.autogenerated + else ([field.name_field] if field.multi else field.name_field) + ) + } for field in bib_fields } @@ -266,11 +268,11 @@ def get_mapping(mapping): def test_add_field_mapping(self, users, mappings): fieldmapping = { - "WKT": "geometrie", - "nom_cite": "nomcite", - "cd_nom": "cdnom", - "cd_hab": "cdhab", - "observers": "observateurs", + "WKT": {"column_src": "geometrie"}, + "nom_cite": {"column_src": "nomcite"}, + "cd_nom": {"column_src": "cdnom"}, + "cd_hab": {"column_src": "cdhab"}, + "observers": {"column_src": "observateurs"}, } url = url_for("import.add_mapping", mappingtype="field") @@ -296,7 +298,7 @@ def test_add_field_mapping(self, users, mappings): label=mappings["content_public"].label, ) - r = self.client.post(url, data={"unexisting": "source column"}) + r = self.client.post(url, data={"unexisting": {"column_src": "source column"}}) assert r.status_code == BadRequest.code r = self.client.post(url, data=fieldmapping) @@ -304,7 +306,7 @@ def test_add_field_mapping(self, users, mappings): fieldmapping.update( { - "date_min": "date_debut", + "date_min": {"column_src": "date_debut"}, } ) r = self.client.post(url, data=fieldmapping) @@ -406,7 +408,7 @@ def test_update_field_mapping_values(self, users, mappings): fm = mappings["field_public"] fieldvalues_update = deepcopy(fm.values) - fieldvalues_update["WKT"] = "WKT2" + fieldvalues_update["WKT"] = {"column_src": 
"WKT2"} fieldvalues_should = deepcopy(fieldvalues_update) del fieldvalues_update["validator"] # should not removed from mapping! r = self.client.post( diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/imports/actions.py b/contrib/gn_module_occhab/backend/gn_module_occhab/imports/actions.py index 1ecddc529c..a0fd94a8a0 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/imports/actions.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/imports/actions.py @@ -474,16 +474,20 @@ def import_data_to_destination(imprt: TImports) -> None: ef.field.name_field: ef.field for ef in entity.fields if ef.field.dest_field != None } insert_fields = {fields["id_station"]} - for field_name, source_field in imprt.fieldmapping.items(): + for field_name, mapping in imprt.fieldmapping.items(): if field_name not in fields: # not a destination field continue field = fields[field_name] + column_src = mapping.get("column_src", None) if field.multi: # TODO@TestImportsOcchab.test_import_valid_file: add testcase - if not set(source_field).isdisjoint(imprt.columns): + if not set(column_src).isdisjoint(imprt.columns): insert_fields |= {field} else: - if source_field in imprt.columns: + if ( + column_src in imprt.columns + or mapping.get("default_value", None) is not None + ): insert_fields |= {field} if entity.code == "station": # unique_dataset_id is replaced with id_dataset diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/650f1d749b3b_add_default_mapping.py b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/650f1d749b3b_add_default_mapping.py new file mode 100644 index 0000000000..d4b6da7b1c --- /dev/null +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/650f1d749b3b_add_default_mapping.py @@ -0,0 +1,175 @@ +"""add_default_mapping + +Revision ID: 650f1d749b3b +Revises: c1a6b0793360 +Create Date: 2024-12-12 13:21:49.612529 + +""" + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.orm import Session +from sqlalchemy.sql import table, column +from sqlalchemy.dialects.postgresql import JSON +from sqlalchemy.ext.mutable import MutableDict + + +# revision identifiers, used by Alembic. 
+revision = "650f1d749b3b" +down_revision = "c1a6b0793360" +branch_labels = None +depends_on = "e43b01a18850" + + +def get_models(conn): + metadata = sa.MetaData(bind=conn) + MappingTemplate = sa.Table("t_mappings", metadata, schema="gn_imports", autoload_with=conn) + FieldMapping = sa.Table("t_fieldmappings", metadata, schema="gn_imports", autoload_with=conn) + return MappingTemplate, FieldMapping + + +def upgrade(): + conn = op.get_bind() + MappingTemplate, FieldMapping = get_models(conn) + + session = Session(bind=op.get_bind()) + id_destination_occhab = session.scalar( + sa.text("SELECT id_destination FROM gn_imports.bib_destinations WHERE code = 'occhab'") + ) + + id_occhab_mapping = session.execute( + sa.select(MappingTemplate.c.id).where(MappingTemplate.c.label == "Occhab") + ).scalar() + + if not id_occhab_mapping: + id_occhab_mapping = session.execute( + sa.insert(MappingTemplate) + .values( + label="Occhab", + type="FIELD", + active=True, + public=True, + id_destination=id_destination_occhab, + ) + .returning(MappingTemplate.c.id) + ).first()[0] + + existing_occhab_fieldmapping = session.execute( + sa.select(FieldMapping.c.id).where(FieldMapping.c.id == id_occhab_mapping) + ).first() + + if not existing_occhab_fieldmapping: + session.execute( + sa.insert(FieldMapping).values( + id=id_occhab_mapping, + values={ + "WKT": {"column_src": "geometry"}, + "altitude_max": {"column_src": "altitude_max"}, + "altitude_min": {"column_src": "altitude_min"}, + "area": {"column_src": "area"}, + "cd_hab": {"column_src": "cd_hab"}, + "comment": {"column_src": "comment"}, + "date_max": {"column_src": "date_fin"}, + "date_min": {"column_src": "date_debut"}, + "depth_max": {"column_src": "depth_max"}, + "depth_min": {"column_src": "depth_min"}, + "id_nomenclature_area_surface_calculation": { + "column_src": "methode_calcul_surface" + }, + "id_nomenclature_exposure": {"column_src": "exposition"}, + "id_nomenclature_geographic_object": {"column_src": "nature_objet_geo"}, + "id_station_source": {"column_src": "id_station"}, + "nom_cite": {"column_src": "nom_cite"}, + "observers_txt": {"column_src": "observateurs"}, + "technical_precision": {"column_src": "precision_technique"}, + "unique_dataset_id": {"column_src": "uuid_jdd"}, + "unique_id_sinp_habitat": {"column_src": "uuid_habitat"}, + "unique_id_sinp_station": {"column_src": "uuid_station"}, + }, + ) + ) + + id_mapping_sinp = session.execute( + sa.select(MappingTemplate.c.id).where( + MappingTemplate.c.label == "Occurrences d'habitats SINP" + ) + ).scalar() + + if not id_mapping_sinp: + id_mapping_sinp = session.execute( + sa.insert(MappingTemplate) + .values( + label="Occurrences d'habitats SINP", + type="FIELD", + active=True, + public=True, + id_destination=id_destination_occhab, + ) + .returning(MappingTemplate.c.id) + ).first()[0] + + existing_sinp_fieldmapping = session.execute( + sa.select(FieldMapping.c.id).where(FieldMapping.c.id == id_mapping_sinp) + ).first() + + if not existing_sinp_fieldmapping: + session.execute( + sa.insert(FieldMapping).values( + id=id_mapping_sinp, + values={ + "WKT": {"column_src": "WKT"}, + "altitude_max": {"column_src": "altMax"}, + "altitude_min": {"column_src": "altMin"}, + "area": {"column_src": "surf"}, + "cd_hab": {"column_src": "cdHab"}, + "comment": {"column_src": "comment"}, + "date_max": {"column_src": "dateFin"}, + "date_min": {"column_src": "dateDebut"}, + "depth_max": {"column_src": "profMax"}, + "depth_min": {"column_src": "profMin"}, + "determiner": {"column_src": "persDeterm"}, + "id_habitat": 
{"column_src": "idOrigine"}, + "id_nomenclature_abundance": {"column_src": "abondHab"}, + "id_nomenclature_area_surface_calculation": { + "column_src": "methodeCalculSurface" + }, + "id_nomenclature_collection_technique": {"column_src": "techCollec"}, + "id_nomenclature_community_interest": { + "column_src": "habitatInteretCommunautaire " + }, + "id_nomenclature_determination_type": {"column_src": "typeDeterm"}, + "id_nomenclature_exposure": {"column_src": "exposition"}, + "id_nomenclature_geographic_object": {"column_src": "natObjGeo"}, + "id_nomenclature_sensitivity": {"column_src": "sensibiliteHab"}, + "id_station_source": {"column_src": "idOrigEvt"}, + "is_habitat_complex": {"column_src": "mosaique"}, + "nom_cite": {"column_src": "nomCite"}, + "numerization_scale": {"column_src": "echelleNumerisation"}, + "observers_txt": {"column_src": "observer"}, + "precision": {"column_src": "precisGeo"}, + "recovery_percentage": {"column_src": "recouv"}, + "station_name": {"column_src": "nomStation"}, + "technical_precision": {"column_src": "precisionTechnique"}, + "unique_dataset_id": {"column_src": "jddMetaId"}, + "unique_id_sinp_grp_phyto": {"column_src": "relevePhyto"}, + "unique_id_sinp_habitat": {"column_src": "idSinpHab"}, + "unique_id_sinp_station": {"column_src": "permId"}, + }, + ) + ) + + session.commit() + session.close() + + +def downgrade(): + conn = op.get_bind() + MappingTemplate, FieldMapping = get_models(conn) + + cte = ( + sa.select(MappingTemplate.c.id) + .where(MappingTemplate.c.label.in_(["Occhab", "Occurrences d'habitats SINP"])) + .cte("mapping_cte") + ) + op.execute(sa.delete(FieldMapping).where(FieldMapping.c.id == cte.c.id)) + op.execute(sa.delete(MappingTemplate).where(MappingTemplate.c.id == cte.c.id)) diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/aed662bbd88a_add_default_mapping.py b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/aed662bbd88a_add_default_mapping.py deleted file mode 100644 index 0914c68b40..0000000000 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/aed662bbd88a_add_default_mapping.py +++ /dev/null @@ -1,145 +0,0 @@ -"""add_default_mapping - -Revision ID: aed662bbd88a -Revises: fcf1e091b636 -Create Date: 2024-07-19 11:04:52.224271 - -""" - -from alembic import op -import sqlalchemy as sa -from sqlalchemy.orm import Session -from sqlalchemy.sql import table, column -from sqlalchemy.dialects.postgresql import JSON -from sqlalchemy.ext.mutable import MutableDict - -# revision identifiers, used by Alembic. 
-revision = "aed662bbd88a" -down_revision = "69494f900cab" -branch_labels = None -depends_on = None - - -def get_models(conn): - metadata = sa.MetaData(bind=conn) - MappingTemplate = sa.Table("t_mappings", metadata, schema="gn_imports", autoload_with=conn) - FieldMapping = sa.Table("t_fieldmappings", metadata, schema="gn_imports", autoload_with=conn) - return MappingTemplate, FieldMapping - - -def upgrade(): - conn = op.get_bind() - MappingTemplate, FieldMapping = get_models(conn) - - session = Session(bind=op.get_bind()) - id_destination_occhab = session.scalar( - sa.text("SELECT id_destination FROM gn_imports.bib_destinations WHERE code = 'occhab'") - ) - - id_occhab_mapping = session.execute( - sa.insert(MappingTemplate) - .values( - label="Occhab", - type="FIELD", - active=True, - public=True, - id_destination=id_destination_occhab, - ) - .returning(MappingTemplate.c.id) - ).first()[0] - - session.execute( - sa.insert(FieldMapping).values( - id=id_occhab_mapping, - values={ - "WKT": "geometry", - "altitude_max": "altitude_max", - "altitude_min": "altitude_min", - "area": "area", - "cd_hab": "cd_hab", - "comment": "comment", - "date_max": "date_fin", - "date_min": "date_debut", - "depth_max": "depth_max", - "depth_min": "depth_min", - "id_nomenclature_area_surface_calculation": "methode_calcul_surface", - "id_nomenclature_exposure": "exposition", - "id_nomenclature_geographic_object": "nature_objet_geo", - "id_station_source": "id_station", - "nom_cite": "nom_cite", - "observers_txt": "observateurs", - "technical_precision": "precision_technique", - "unique_dataset_id": "uuid_jdd", - "unique_id_sinp_habitat": "uuid_habitat", - "unique_id_sinp_station": "uuid_station", - }, - ) - ) - - id_mapping_sinp = session.execute( - sa.insert(MappingTemplate) - .values( - label="Occurrences d'habitats SINP", - type="FIELD", - active=True, - public=True, - id_destination=id_destination_occhab, - ) - .returning(MappingTemplate.c.id) - ).first()[0] - - session.execute( - sa.insert(FieldMapping).values( - id=id_mapping_sinp, - values={ - "WKT": "WKT", - "altitude_max": "altMax", - "altitude_min": "altMin", - "area": "surf", - "cd_hab": "cdHab", - "comment": "comment", - "date_max": "dateFin", - "date_min": "dateDebut", - "depth_max": "profMax", - "depth_min": "profMin", - "determiner": "persDeterm", - "id_habitat": "idOrigine", - "id_nomenclature_abundance": "abondHab", - "id_nomenclature_area_surface_calculation": "methodeCalculSurface", - "id_nomenclature_collection_technique": "techCollec", - "id_nomenclature_community_interest": "habitatInteretCommunautaire ", - "id_nomenclature_determination_type": "typeDeterm", - "id_nomenclature_exposure": "exposition", - "id_nomenclature_geographic_object": "natObjGeo", - "id_nomenclature_sensitivity": "sensibiliteHab", - "id_station_source": "idOrigEvt", - "is_habitat_complex": "mosaique", - "nom_cite": "nomCite", - "numerization_scale": "echelleNumerisation", - "observers_txt": "observer", - "precision": "precisGeo", - "recovery_percentage": "recouv", - "station_name": "nomStation", - "technical_precision": "precisionTechnique", - "unique_dataset_id": "jddMetaId", - "unique_id_sinp_grp_phyto": "relevePhyto", - "unique_id_sinp_habitat": "idSinpHab", - "unique_id_sinp_station": "permId", - }, - ) - ) - session.commit() - session.close() - - -def downgrade(): - conn = op.get_bind() - MappingTemplate, FieldMapping = get_models(conn) - - cte = ( - sa.select(MappingTemplate.c.id) - .where(MappingTemplate.c.label.in_(["OccHab", "Occurrences d'habitats SINP"])) - 
.cte("mapping_cte") - ) - op.execute(sa.delete(FieldMapping).where(FieldMapping.c.id == cte.c.id)) - op.execute(sa.delete(MappingTemplate).where(MappingTemplate.c.id == cte.c.id)) diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/aed662bbd88a_do_nothing.py b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/aed662bbd88a_do_nothing.py new file mode 100644 index 0000000000..b6a10e8676 --- /dev/null +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/aed662bbd88a_do_nothing.py @@ -0,0 +1,21 @@ +"""do_nothing + +Revision ID: aed662bbd88a +Revises: fcf1e091b636 +Create Date: 2024-07-19 11:04:52.224271 + +""" + +# revision identifiers, used by Alembic. +revision = "aed662bbd88a" +down_revision = "69494f900cab" +branch_labels = None +depends_on = None + + +def upgrade(): + pass + + +def downgrade(): + pass diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/e43f039b5ff1_bib_type_field_conforms_to_dynamic_form.py b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/e43f039b5ff1_bib_type_field_conforms_to_dynamic_form.py new file mode 100644 index 0000000000..6d76ffae61 --- /dev/null +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/e43f039b5ff1_bib_type_field_conforms_to_dynamic_form.py @@ -0,0 +1,98 @@ +"""bib.type_field conforms to dynamic form + +Revision ID: e43f039b5ff1 +Revises: 650f1d749b3b +Create Date: 2024-12-13 14:37:45.171977 + +""" + +import json +from alembic import op +import sqlalchemy as sa +from sqlalchemy.orm import Session +from sqlalchemy.schema import Table, MetaData + + +# revision identifiers, used by Alembic. +revision = "e43f039b5ff1" +down_revision = "650f1d749b3b" +branch_labels = None +depends_on = None + + +def upgrade(): + session = Session(bind=op.get_bind()) + conn = op.get_bind() + meta = sa.MetaData(bind=conn) + bib_fields = sa.Table("bib_fields", meta, autoload_with=conn, schema="gn_imports") + + id_destination = session.scalar( + sa.text("SELECT id_destination FROM gn_imports.bib_destinations WHERE code = 'occhab'") + ) + + updates = { + "altitude_max": "number", + "altitude_min": "number", + "area": "number", + "cd_hab": "number", + "comment": "textarea", + "date_max": "date", + "date_min": "date", + "depth_max": "number", + "depth_min": "number", + "determiner": "textarea", + "geom_4326": "textarea", + "geom_local": "textarea", + "id_dataset": "textarea", + "id_digitiser": "number", + "id_habitat": "number", + "id_nomenclature_abundance": "nomenclature", + "id_nomenclature_area_surface_calculation": "nomenclature", + "id_nomenclature_collection_technique": "nomenclature", + "id_nomenclature_community_interest": "nomenclature", + "id_nomenclature_determination_type": "nomenclature", + "id_nomenclature_exposure": "nomenclature", + "id_nomenclature_geographic_object": "nomenclature", + "id_nomenclature_sensitivity": "nomenclature", + "id_nomenclature_type_mosaique_habitat": "nomenclature", + "id_station": "number", + "id_station_source": "number", + "latitude": "number", + "longitude": "number", + "nom_cite": "taxonomy", + "numerization_scale": "textarea", + "observers_txt": "textarea", + "precision": "textarea", + "recovery_percentage": ["number", '{"min":0, "max":100}'], + "station_name": "textarea", + "technical_precision": "textarea", + "unique_dataset_id": "dataset", + "unique_id_sinp_grp_occtax": "textarea", + "unique_id_sinp_grp_phyto": "textarea", + "unique_id_sinp_habitat": "textarea", + "unique_id_sinp_station": "textarea", + "WKT": "textarea", + } + + 
for name_field, value in updates.items(): + if isinstance(value, str): + values = { + "type_field": value, + } + else: + values = { + "type_field": value[0], + "type_field_params": json.loads(value[1]), + } + + op.execute( + sa.update(bib_fields) + .where( + bib_fields.c.name_field == name_field, bib_fields.c.id_destination == id_destination + ) + .values(values) + ) + + +def downgrade(): + pass diff --git a/frontend/src/app/GN2CommonModule/form/dynamic-form/dynamic-form.component.html b/frontend/src/app/GN2CommonModule/form/dynamic-form/dynamic-form.component.html index 9d96cd8045..f023e1aa0d 100644 --- a/frontend/src/app/GN2CommonModule/form/dynamic-form/dynamic-form.component.html +++ b/frontend/src/app/GN2CommonModule/form/dynamic-form/dynamic-form.component.html @@ -289,6 +289,7 @@ [cdNomenclatures]="formDefComp['cd_nomenclatures']" [regne]="formDefComp['regne']" [group2Inpn]="formDefComp['group2Inpn']" + [bindAllItem]="formDefComp['bind_all_item']" > { + return referenceControlNames.some((referenceControlName) => { const referenceControl = currentControl.parent.get(referenceControlName); // Throw an error if the reference control is null or undefined - if (referenceControl == null) throw Error('Reference formControl is null or undefined'); + if (referenceControl == null) + throw Error(`Reference formControl is null or undefined: ${referenceControlName}`); - if (referenceControl.value !== null && referenceControl.value !== undefined) { - result = true; + if ( + (referenceControl.value !== null && referenceControl.value !== undefined) || + this.hasDefaultValue(referenceControl) + ) { + return true; } }); - return result; + } + + hasDefaultValue(control: AbstractControl): boolean { + const controlName = Object.keys(control.parent.controls).find( + (name) => control.parent.get(name) == control + ); + const defaultValueControlName = `${controlName}_default_value`; + const value = control.parent.get(defaultValueControlName)?.value; + return value != null && value !== ''; + } + + isRequiredOrHasDefaultValue(control: AbstractControl): ValidationErrors | null { + if (this.hasDefaultValue(control)) { + return null; + } + return Validators.required(control); } /** @@ -231,7 +245,7 @@ export class FormService { return null; } return this.areAllRefControlsNotNull(referenceControlNames, currentControl) - ? Validators.required(currentControl) + ? this.isRequiredOrHasDefaultValue(currentControl) : null; }; } @@ -251,14 +265,14 @@ export class FormService { } return this.areAnyRefControlsNotNull(referenceControlNames, currentControl) ? null - : Validators.required(currentControl); + : this.isRequiredOrHasDefaultValue(currentControl); }; } NotRequiredIfNoOther(entityControls: string[]): ValidatorFn { return (currentControl: AbstractControl): ValidationErrors | null => { return this.areAnyRefControlsNotNull(entityControls, currentControl) - ? Validators.required(currentControl) + ? this.isRequiredOrHasDefaultValue(currentControl) : null; }; } diff --git a/frontend/src/app/GN2CommonModule/form/taxonomy/taxonomy.component.ts b/frontend/src/app/GN2CommonModule/form/taxonomy/taxonomy.component.ts index dc3ca24f40..83b143bdcb 100644 --- a/frontend/src/app/GN2CommonModule/form/taxonomy/taxonomy.component.ts +++ b/frontend/src/app/GN2CommonModule/form/taxonomy/taxonomy.component.ts @@ -99,8 +99,9 @@ export class TaxonomyComponent implements OnInit, OnChanges { if (!this.apiEndPoint) { this.setApiEndPoint(this.idList); } + // Use "!= null" instead of "!== null" to also match undefined values. 
this.parentFormControl.valueChanges - .pipe(filter((value) => value !== null && value.length === 0)) + .pipe(filter((value) => value != null && value.length === 0)) .subscribe((value) => { this.onDelete.emit(); }); diff --git a/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.html b/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.html index f047d29451..9e18b1af59 100644 --- a/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.html +++ b/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.html @@ -61,7 +61,7 @@
{{ 'Import.FieldMapping.Title' | translate }}
> {{ 'Import.FieldMapping.SourceFieldsAllMapped' - | translate: { sourceFieldsLength: sourceFields.length() } + | translate: { sourceFieldsLength: sourceFields.length } }}
diff --git a/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.ts b/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.ts index 918521e00a..797d20b412 100644 --- a/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.ts +++ b/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.ts @@ -1,14 +1,13 @@ import { Component, OnInit, ViewChild } from '@angular/core'; import { ImportDataService } from '../../../services/data.service'; import { FieldMappingService } from '@geonature/modules/imports/services/mappings/field-mapping.service'; -import { FieldMappingModalComponent } from './field-mapping-modal/field-mapping-modal.component'; import { Cruved, toBooleanCruved } from '@geonature/modules/imports/models/cruved.model'; import { Step } from '@geonature/modules/imports/models/enums.model'; import { ActivatedRoute } from '@angular/router'; import { ImportProcessService } from '../import-process.service'; import { CruvedStoreService } from '@geonature_common/service/cruved-store.service'; -import { concatMap, finalize, first, flatMap, skip, take } from 'rxjs/operators'; -import { Observable, Subscription, of } from 'rxjs'; +import { concatMap, flatMap, skip, take } from 'rxjs/operators'; +import { Observable, of } from 'rxjs'; import { NgbModal } from '@ng-bootstrap/ng-bootstrap'; import { Import } from '@geonature/modules/imports/models/import.model'; import { @@ -33,6 +32,7 @@ export class FieldsMappingStepComponent implements OnInit { public updateAvailable: boolean = false; public step: Step; public modalCreateMappingForm = new FormControl(''); + public defaultValueFormDefs: any = {}; constructor( public _fieldMappingService: FieldMappingService, @@ -53,6 +53,7 @@ export class FieldsMappingStepComponent implements OnInit { if (!fieldMappings) return; this._fieldMappingService.parseData({ fieldMappings, targetFields, sourceFields }); this.targetFields = this._fieldMappingService.getTargetFieldsData(); + this.defaultValueFormDefs = this._fieldMappingService.getDefaultValueFormDefs(); this.sourceFields = this._fieldMappingService.getSourceFieldsData(); this._fieldMappingService.initForm(); this._fieldMappingService.populateMappingForm(); @@ -114,12 +115,24 @@ export class FieldsMappingStepComponent implements OnInit { } getFieldMappingValues(): FieldMappingValues { - let values: FieldMappingValues = {}; - for (let [key, value] of Object.entries(this._fieldMappingService.mappingFormGroup.value)) { - if (value != null) { - values[key] = Array.isArray(value) ? 
value : (value as string); - } - } + const values: FieldMappingValues = {}; + this._fieldMappingService + .flattenTargetFieldData(this.targetFields) + .forEach(({ name_field }) => { + const column_src = this._fieldMappingService.mappingFormGroup.get(name_field)?.value; + const default_value = this._fieldMappingService.mappingFormGroup.get( + `${name_field}_default_value` + )?.value; + if (column_src || default_value) { + values[name_field] = { + column_src: column_src || undefined, + default_value: this._fieldMappingService.getFieldDefaultValue( + name_field, + default_value + ), + }; + } + }); return values; } diff --git a/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/mapping-theme/mapping-theme.component.html b/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/mapping-theme/mapping-theme.component.html index b8ce2311ca..a1ec086102 100644 --- a/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/mapping-theme/mapping-theme.component.html +++ b/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/mapping-theme/mapping-theme.component.html @@ -86,6 +86,12 @@ > {{ _fm.mappingFormGroup.controls[field.name_field].getError('conflict') }} + +
; + defaultValueFormDefs: any = {}; + constructor(public _fm: FieldMappingService) {} - ngOnInit() {} + ngOnInit() { + this.defaultValueFormDefs = this._fm.getDefaultValueFormDefs(); + } isMapped(keySource: string) { return this._fm.checkTargetFieldStatus('mapped', keySource); diff --git a/frontend/src/app/modules/imports/components/import_report/import_report.component.html b/frontend/src/app/modules/imports/components/import_report/import_report.component.html index 43b87d6392..ec71838a7b 100644 --- a/frontend/src/app/modules/imports/components/import_report/import_report.component.html +++ b/frontend/src/app/modules/imports/components/import_report/import_report.component.html @@ -131,6 +131,7 @@
Champs ({{ (importData?.fieldmapping || {} | keyvalue).length }})
Champ source Champ cible + Valeur par défaut Description @@ -138,6 +139,7 @@
Champs ({{ (importData?.fieldmapping || {} | keyvalue).length }})
{{ field.source }} {{ field.destination }} + {{ field.default_value }} {{ field.description }} diff --git a/frontend/src/app/modules/imports/components/import_report/import_report.component.ts b/frontend/src/app/modules/imports/components/import_report/import_report.component.ts index a68cb63aae..0b0c937261 100644 --- a/frontend/src/app/modules/imports/components/import_report/import_report.component.ts +++ b/frontend/src/app/modules/imports/components/import_report/import_report.component.ts @@ -261,10 +261,12 @@ export class ImportReportComponent implements OnInit { mapField(listField: Field[], fieldMapping: FieldMappingValues): Array { const mappedFields: Array = listField.map((field) => { + const mapping = fieldMapping[field.name_field]; return { - source: fieldMapping[field.name_field], + source: mapping?.column_src, description: field.comment, destination: field.name_field, + default_value: mapping?.default_value, }; }); return mappedFields; diff --git a/frontend/src/app/modules/imports/models/mapping.model.ts b/frontend/src/app/modules/imports/models/mapping.model.ts index 302aa5a130..472b84095c 100644 --- a/frontend/src/app/modules/imports/models/mapping.model.ts +++ b/frontend/src/app/modules/imports/models/mapping.model.ts @@ -10,6 +10,11 @@ interface Mapping { cruved: Cruved; } +export interface FormDef { + type_widget: string; + [propName: string]: any; +} + export interface Field { id_field: number; autogenerated: boolean; @@ -21,10 +26,19 @@ export interface Field { multi: boolean; name_field: string; mapping_condition: string; + entity: any; + type_field: string; + type_field_params: any; + mnemonique: string; + mandatory_conditions: string[]; + optional_conditions: string[]; } export interface FieldMappingValues { - [propName: string]: string | string[]; + [propName: string]: { + column_src: string | string[]; + default_value?: string | number | any; + }; } export interface FieldMapping extends Mapping { diff --git a/frontend/src/app/modules/imports/services/mappings/field-mapping.service.ts b/frontend/src/app/modules/imports/services/mappings/field-mapping.service.ts index aa870ecc49..3101ee6b86 100644 --- a/frontend/src/app/modules/imports/services/mappings/field-mapping.service.ts +++ b/frontend/src/app/modules/imports/services/mappings/field-mapping.service.ts @@ -6,14 +6,16 @@ import { AbstractControl, ValidationErrors, FormBuilder, - ValidatorFn, } from '@angular/forms'; import { ImportDataService } from '../data.service'; -import { FieldMapping, FieldMappingValues } from '../../models/mapping.model'; +import { Field, FieldMapping, FieldMappingValues, FormDef } from '../../models/mapping.model'; import { BehaviorSubject, Subscription, forkJoin } from 'rxjs'; import { ImportProcessService } from '../../components/import_process/import-process.service'; import { ConfigService } from '@geonature/services/config.service'; import { FormService } from '@geonature_common/form/form.service'; +import { isPlainObject } from 'lodash'; +import { NgbDateParserFormatter } from '@ng-bootstrap/ng-bootstrap'; +import { DataFormService } from '@geonature_common/form/data-form.service'; interface FieldsMappingStatus { mapped: Set; @@ -70,6 +72,10 @@ export class FieldMappingService { */ private sourceFields: Array; + private defaultValueFormDefs: { + [propName: string]: FormDef; + } = {}; + private fieldsByEntity: Map> = new Map(); /** @@ -94,7 +100,9 @@ export class FieldMappingService { private _importDataService: ImportDataService, private _importProcessService: ImportProcessService, 
private _configService: ConfigService, - private _formservice: FormService + private _formservice: FormService, + private _dateParser: NgbDateParserFormatter, + private _dataFormService: DataFormService ) {} /** @@ -114,12 +122,13 @@ export class FieldMappingService { this.sourceFields = sourceFields; this.fieldMappingStatus.unmapped = new Set(this.sourceFields); - this.flattenTargetFieldData(this.targetFieldsData).forEach(({ autogenerated, name_field }) => { - if (autogenerated) { - this.fieldMappingStatus.autogenerated.add(name_field); + this.flattenTargetFieldData(this.targetFieldsData).forEach((field) => { + if (field.autogenerated) { + this.fieldMappingStatus.autogenerated.add(field.name_field); } else { // this.fieldMappingStatus.unmapped.add(name_field); } + this.defaultValueFormDefs[field.name_field] = this.getFieldDefaultValueFormDef(field); }); } @@ -143,6 +152,9 @@ export class FieldMappingService { getMappings() { return this.mappingData; } + getDefaultValueFormDefs() { + return this.defaultValueFormDefs; + } getUnmappedFieldsLength() { return this.getUnmappedFields().length; @@ -162,7 +174,7 @@ export class FieldMappingService { * @param {any[]} targetFields - The array of target fields containing themes and fields. * @return {any[]} The flattened array of field mapping data. */ - flattenTargetFieldData(targetFields: any) { + flattenTargetFieldData(targetFields: any): Field[] { let flattened = []; targetFields.forEach(({ entity, themes }) => { let entityFields = []; @@ -237,28 +249,58 @@ export class FieldMappingService { */ populateMappingForm() { // Populate the form group - this.flattenTargetFieldData(this.targetFieldsData).forEach( - ({ name_field, autogenerated, mandatory, mandatory_conditions, optional_conditions }) => { - let control: AbstractControl; - let oldValue = null; - if (!(name_field in this.mappingFormGroup.controls)) { - control = new FormControl(null, mandatory ? 
[Validators.required] : []); - control.valueChanges.subscribe((vc) => { - if (Array.isArray(vc)) this.manageValueChangeMulti(oldValue, vc); - else this.onFieldMappingChange(vc, oldValue); - oldValue = vc; - control.setValue(vc, { emitEvent: false }); - }); - } else { - control = this.mappingFormGroup.controls[name_field]; - } + this.flattenTargetFieldData(this.targetFieldsData).forEach(({ name_field, multi }) => { + let column_src_control: AbstractControl; + let oldValue = null; + if (!(name_field in this.mappingFormGroup.controls)) { + // Control validators will be set in the following iteration + column_src_control = new FormControl(null, []); + column_src_control.valueChanges.subscribe((vc) => { + if (Array.isArray(vc)) this.manageValueChangeMulti(oldValue, vc); + else this.onFieldMappingChange(vc, oldValue); + oldValue = vc; + column_src_control.setValue(vc, { emitEvent: false }); + }); + } else { + column_src_control = this.mappingFormGroup.controls[name_field]; + } - // Reset the control in the form group - this.mappingFormGroup.addControl(name_field, control); + let default_value_control: AbstractControl; + const name_field_default_value = `${name_field}_default_value`; + if (!(name_field_default_value in this.mappingFormGroup.controls)) { + default_value_control = new FormControl(null, [ + (control: AbstractControl): ValidationErrors | null => { + if (!multi || control.value == null || control.value == '') { + return null; + } + + let isError = false; + try { + const json = JSON.parse(control.value); + if (!isPlainObject(json)) { + isError = true; + } + } catch (error) { + isError = true; + } + + return isError ? { invalidJSON: true } : null; + }, + ]); + default_value_control.valueChanges.subscribe((vc) => { + column_src_control.updateValueAndValidity(); + }); + } else { + default_value_control = this.mappingFormGroup.controls[name_field_default_value]; } - ); + + // Reset the control in the form group + this.mappingFormGroup.addControl(name_field, column_src_control); + this.mappingFormGroup.addControl(name_field_default_value, default_value_control); + }); // Deal with inter-field conditions + // TODO ? 
Move the content of this iteration in the previous iteration this.flattenTargetFieldData(this.targetFieldsData).forEach( ({ name_field, mandatory, mandatory_conditions, optional_conditions, entity }) => { if (mandatory_conditions !== null && !mandatory) { @@ -332,24 +374,46 @@ export class FieldMappingService { // Retrieve fields for this mapping this.mappingFormGroup.reset(); - Object.entries(mappingvalues as FieldMappingValues).forEach(([target, source]) => { + Object.entries(mappingvalues as FieldMappingValues).forEach(async ([target, source]) => { let control = this.mappingFormGroup.get(target); if (control) { - if (typeof source === 'object') { - let value = source; - let filtered = source.filter((x) => this.sourceFields.includes(x)); + if (typeof source.column_src === 'object') { + let value = source.column_src; + let filtered = source.column_src.filter((x) => this.sourceFields.includes(x)); if (filtered.length > 0) { value = filtered; } control.setValue(value); } else { if ( - !this.sourceFields.includes(source) && - !(target in this.fieldMappingStatus.autogenerated) + this.sourceFields.includes(source.column_src) || + target in this.fieldMappingStatus.autogenerated ) { - return; + control.setValue(source.column_src); + } + } + } + + const default_value_control = this.mappingFormGroup.get(`${target}_default_value`); + if (default_value_control) { + const formDef = this.defaultValueFormDefs[target]; + if (formDef.type_widget === 'date') { + return default_value_control.setValue(this._dateParser.parse(source.default_value)); + } + if (formDef.type_widget === 'taxonomy' && source.default_value) { + const taxref = await this._dataFormService.getTaxonInfoSynchrone(source.default_value); + return default_value_control.setValue(taxref); + } + + try { + const json = JSON.parse(source.default_value); + if (typeof json === 'object') { + default_value_control.setValue(JSON.stringify(json, null, 2)); + } else { + default_value_control.setValue(source.default_value); } - control.setValue(source); + } catch (error) { + default_value_control.setValue(source.default_value); } } }); @@ -411,4 +475,48 @@ export class FieldMappingService { } return false; } + + getFieldDefaultValue(name_field: string, default_value: any) { + const formDef = this.defaultValueFormDefs[name_field]; + if (formDef.type_widget === 'nomenclature') { + // Using the nomenclature's label instead of the ID allows us to avoid modifying the content mapping step. + return typeof default_value === 'string' ? default_value : default_value?.label_default; + } + if (formDef.type_widget === 'taxonomy') { + return default_value?.cd_nom; + } + if (formDef.type_widget === 'date') { + return this._dateParser.format(default_value) || undefined; + } + + return default_value ?? undefined; + } + + getFieldDefaultValueFormDef(field: Field): FormDef { + const def: any = { + attribut_label: 'Valeur par défaut', + attribut_name: `${field.name_field}_default_value`, + ...(field.type_field_params || {}), + }; + // Like server side, we use field.mnemonique and field.multi to handle these types of field + if (field.mnemonique || field.type_field === 'nomenclature') { + return { + ...def, + type_widget: 'nomenclature', + code_nomenclature_type: field.mnemonique || def.code_nomenclature_type, + bind_all_item: true, + }; + } + if (field.multi) { + return { + ...def, + type_widget: 'textarea', + }; + } + + return { + ...def, + type_widget: field.type_field || 'text', + }; + } }
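
To close the review, here is how the backend resolves a value for a mapped field once the new format is in place: for single-column fields, preprocess_value() (see backend/geonature/core/imports/utils.py above) creates the column when it is absent from the uploaded file and falls back to "default_value" for empty or missing cells. A simplified, self-contained sketch of that behaviour follows — the function name and data are illustrative, not part of the patch:

    import pandas as pd

    def resolve_single_column(df: pd.DataFrame, source_col: str, default_value=None) -> pd.Series:
        # If the mapped column is absent from the uploaded file, create it empty,
        # as preprocess_value() now does for single-value fields.
        if source_col not in df.columns:
            df[source_col] = None
        col = df[source_col]
        # Empty strings and missing cells fall back to the constant default value.
        if default_value is not None:
            col = col.replace({"": default_value, None: default_value})
        return col

    chunk = pd.DataFrame({"nombre_max": ["3", "", None]})
    print(resolve_single_column(chunk, "nombre_max", default_value="5").tolist())
    # ['3', '5', '5']

Multi fields (e.g. additional_data) follow the same idea, but their JSON "default_value" is merged key by key into the dictionary built from the source columns by build_additional_data().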