diff --git a/src/datadoc/config.py b/src/datadoc/config.py index c67bdd38..9c4c393b 100644 --- a/src/datadoc/config.py +++ b/src/datadoc/config.py @@ -8,12 +8,11 @@ from pprint import pformat from typing import Literal +from dataset import enums from dotenv import dotenv_values from dotenv import load_dotenv from datadoc.constants import DAPLA_MANUAL_TEXT -from datadoc.enums import DaplaRegion -from datadoc.enums import DaplaService from datadoc.frontend.components.builders import build_link_object logging.basicConfig(level=logging.DEBUG, force=True) @@ -117,18 +116,18 @@ def get_statistical_subject_source_url() -> str | None: return _get_config_item("DATADOC_STATISTICAL_SUBJECT_SOURCE_URL") -def get_dapla_region() -> DaplaRegion | None: +def get_dapla_region() -> enums.DaplaRegion | None: """Get the Dapla region we're running on.""" if region := _get_config_item(DAPLA_REGION): - return DaplaRegion(region) + return enums.DaplaRegion(region) return None -def get_dapla_service() -> DaplaService | None: +def get_dapla_service() -> enums.DaplaService | None: """Get the Dapla service we're running on.""" if service := _get_config_item(DAPLA_SERVICE): - return DaplaService(service) + return enums.DaplaService(service) return None diff --git a/src/datadoc/enums.py b/src/datadoc/enums.py index 1b2fa666..66b4951a 100644 --- a/src/datadoc/enums.py +++ b/src/datadoc/enums.py @@ -4,39 +4,8 @@ from enum import Enum -from datadoc_model import model -from datadoc_model.model import LanguageStringType -from datadoc_model.model import LanguageStringTypeItem - - -class DaplaRegion(str, Enum): - """Dapla platforms/regions.""" - - DAPLA_LAB = "DAPLA_LAB" - BIP = "BIP" - ON_PREM = "ON_PREM" - CLOUD_RUN = "CLOUD_RUN" - - -class DaplaService(str, Enum): - """Dapla services.""" - - DATADOC = "DATADOC" - JUPYTERLAB = "JUPYTERLAB" - VS_CODE = "VS_CODE" - R_STUDIO = "R_STUDIO" - KILDOMATEN = "KILDOMATEN" - - -class SupportedLanguages(str, Enum): - """The list of languages metadata may be recorded in. - - Reference: https://www.iana.org/assignments/language-subtag-registry/language-subtag-registry - """ - - NORSK_BOKMÅL = "nb" - NORSK_NYNORSK = "nn" - ENGLISH = "en" +from dataset import enums +from dataset import model class LanguageStringsEnum(Enum): @@ -44,7 +13,7 @@ class LanguageStringsEnum(Enum): def __init__( self, - language_strings: LanguageStringType, + language_strings: model.LanguageStringType, ) -> None: """Store the LanguageStringType object for displaying enum values in multiple languages. 
@@ -69,7 +38,7 @@ def _missing_(cls, value: object) -> LanguageStringsEnum: def get_value_for_language( self, - language: SupportedLanguages, + language: enums.SupportedLanguages, ) -> str | None: """Retrieve the string for the relevant language.""" if self.language_strings.root is not None: @@ -82,34 +51,34 @@ def get_value_for_language( class Assessment(LanguageStringsEnum): """Sensitivity of data.""" - SENSITIVE = LanguageStringType( + SENSITIVE = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.Assessment.SENSITIVE.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="SENSITIV"), - LanguageStringTypeItem(languageCode="nb", languageText="SENSITIV"), + model.LanguageStringTypeItem(languageCode="nn", languageText="SENSITIV"), + model.LanguageStringTypeItem(languageCode="nb", languageText="SENSITIV"), ], ) - PROTECTED = LanguageStringType( + PROTECTED = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.Assessment.PROTECTED.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="SKJERMET"), - LanguageStringTypeItem(languageCode="nb", languageText="SKJERMET"), + model.LanguageStringTypeItem(languageCode="nn", languageText="SKJERMET"), + model.LanguageStringTypeItem(languageCode="nb", languageText="SKJERMET"), ], ) - OPEN = LanguageStringType( + OPEN = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.Assessment.OPEN.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="ÅPEN"), - LanguageStringTypeItem(languageCode="nb", languageText="ÅPEN"), + model.LanguageStringTypeItem(languageCode="nn", languageText="ÅPEN"), + model.LanguageStringTypeItem(languageCode="nb", languageText="ÅPEN"), ], ) @@ -117,44 +86,44 @@ class Assessment(LanguageStringsEnum): class DataSetStatus(LanguageStringsEnum): """Lifecycle status of a dataset.""" - DRAFT = LanguageStringType( + DRAFT = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.DataSetStatus.DRAFT.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="UTKAST"), - LanguageStringTypeItem(languageCode="nb", languageText="UTKAST"), + model.LanguageStringTypeItem(languageCode="nn", languageText="UTKAST"), + model.LanguageStringTypeItem(languageCode="nb", languageText="UTKAST"), ], ) - INTERNAL = LanguageStringType( + INTERNAL = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.DataSetStatus.INTERNAL.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="INTERN"), - LanguageStringTypeItem(languageCode="nb", languageText="INTERN"), + model.LanguageStringTypeItem(languageCode="nn", languageText="INTERN"), + model.LanguageStringTypeItem(languageCode="nb", languageText="INTERN"), ], ) - EXTERNAL = LanguageStringType( + EXTERNAL = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.DataSetStatus.EXTERNAL.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="EKSTERN"), - LanguageStringTypeItem(languageCode="nb", languageText="EKSTERN"), + model.LanguageStringTypeItem(languageCode="nn", languageText="EKSTERN"), + model.LanguageStringTypeItem(languageCode="nb", languageText="EKSTERN"), ], ) - DEPRECATED = LanguageStringType( + DEPRECATED = 
model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.DataSetStatus.DEPRECATED.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="UTGÅTT"), - LanguageStringTypeItem(languageCode="nb", languageText="UTGÅTT"), + model.LanguageStringTypeItem(languageCode="nn", languageText="UTGÅTT"), + model.LanguageStringTypeItem(languageCode="nb", languageText="UTGÅTT"), ], ) @@ -162,54 +131,60 @@ class DataSetStatus(LanguageStringsEnum): class DataSetState(LanguageStringsEnum): """Processing state of a dataset.""" - SOURCE_DATA = LanguageStringType( + SOURCE_DATA = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.DataSetState.SOURCE_DATA.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="KILDEDATA"), - LanguageStringTypeItem(languageCode="nb", languageText="KILDEDATA"), + model.LanguageStringTypeItem(languageCode="nn", languageText="KILDEDATA"), + model.LanguageStringTypeItem(languageCode="nb", languageText="KILDEDATA"), ], ) - INPUT_DATA = LanguageStringType( + INPUT_DATA = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.DataSetState.INPUT_DATA.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="INNDATA"), - LanguageStringTypeItem(languageCode="nb", languageText="INNDATA"), + model.LanguageStringTypeItem(languageCode="nn", languageText="INNDATA"), + model.LanguageStringTypeItem(languageCode="nb", languageText="INNDATA"), ], ) - PROCESSED_DATA = LanguageStringType( + PROCESSED_DATA = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.DataSetState.PROCESSED_DATA.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="KLARGJORTE DATA"), - LanguageStringTypeItem(languageCode="nb", languageText="KLARGJORTE DATA"), + model.LanguageStringTypeItem( + languageCode="nn", + languageText="KLARGJORTE DATA", + ), + model.LanguageStringTypeItem( + languageCode="nb", + languageText="KLARGJORTE DATA", + ), ], ) - STATISTICS = LanguageStringType( + STATISTICS = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.DataSetState.STATISTICS.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="STATISTIKK"), - LanguageStringTypeItem(languageCode="nb", languageText="STATISTIKK"), + model.LanguageStringTypeItem(languageCode="nn", languageText="STATISTIKK"), + model.LanguageStringTypeItem(languageCode="nb", languageText="STATISTIKK"), ], ) - OUTPUT_DATA = LanguageStringType( + OUTPUT_DATA = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.DataSetState.OUTPUT_DATA.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="UTDATA"), - LanguageStringTypeItem(languageCode="nb", languageText="UTDATA"), + model.LanguageStringTypeItem(languageCode="nn", languageText="UTDATA"), + model.LanguageStringTypeItem(languageCode="nb", languageText="UTDATA"), ], ) @@ -220,44 +195,44 @@ class TemporalityTypeType(LanguageStringsEnum): More information about temporality type: https://statistics-norway.atlassian.net/l/c/HV12q90R """ - FIXED = LanguageStringType( + FIXED = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.TemporalityTypeType.FIXED.value, ), - 
LanguageStringTypeItem(languageCode="nn", languageText="FAST"), - LanguageStringTypeItem(languageCode="nb", languageText="FAST"), + model.LanguageStringTypeItem(languageCode="nn", languageText="FAST"), + model.LanguageStringTypeItem(languageCode="nb", languageText="FAST"), ], ) - STATUS = LanguageStringType( + STATUS = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.TemporalityTypeType.STATUS.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="TVERRSNITT"), - LanguageStringTypeItem(languageCode="nb", languageText="TVERRSNITT"), + model.LanguageStringTypeItem(languageCode="nn", languageText="TVERRSNITT"), + model.LanguageStringTypeItem(languageCode="nb", languageText="TVERRSNITT"), ], ) - ACCUMULATED = LanguageStringType( + ACCUMULATED = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.TemporalityTypeType.ACCUMULATED.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="AKKUMULERT"), - LanguageStringTypeItem(languageCode="nb", languageText="AKKUMULERT"), + model.LanguageStringTypeItem(languageCode="nn", languageText="AKKUMULERT"), + model.LanguageStringTypeItem(languageCode="nb", languageText="AKKUMULERT"), ], ) - EVENT = LanguageStringType( + EVENT = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.TemporalityTypeType.EVENT.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="HENDELSE"), - LanguageStringTypeItem(languageCode="nb", languageText="HENDELSE"), + model.LanguageStringTypeItem(languageCode="nn", languageText="HENDELSE"), + model.LanguageStringTypeItem(languageCode="nb", languageText="HENDELSE"), ], ) @@ -265,54 +240,54 @@ class TemporalityTypeType(LanguageStringsEnum): class DataType(LanguageStringsEnum): """Simplified data types for metadata purposes.""" - STRING = LanguageStringType( + STRING = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.DataType.STRING.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="TEKST"), - LanguageStringTypeItem(languageCode="nb", languageText="TEKST"), + model.LanguageStringTypeItem(languageCode="nn", languageText="TEKST"), + model.LanguageStringTypeItem(languageCode="nb", languageText="TEKST"), ], ) - INTEGER = LanguageStringType( + INTEGER = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.DataType.INTEGER.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="HELTALL"), - LanguageStringTypeItem(languageCode="nb", languageText="HELTALL"), + model.LanguageStringTypeItem(languageCode="nn", languageText="HELTALL"), + model.LanguageStringTypeItem(languageCode="nb", languageText="HELTALL"), ], ) - FLOAT = LanguageStringType( + FLOAT = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.DataType.FLOAT.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="DESIMALTALL"), - LanguageStringTypeItem(languageCode="nb", languageText="DESIMALTALL"), + model.LanguageStringTypeItem(languageCode="nn", languageText="DESIMALTALL"), + model.LanguageStringTypeItem(languageCode="nb", languageText="DESIMALTALL"), ], ) - DATETIME = LanguageStringType( + DATETIME = model.LanguageStringType( [ - LanguageStringTypeItem( + 
model.LanguageStringTypeItem( languageCode="en", languageText=model.DataType.DATETIME.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="DATOTID"), - LanguageStringTypeItem(languageCode="nb", languageText="DATOTID"), + model.LanguageStringTypeItem(languageCode="nn", languageText="DATOTID"), + model.LanguageStringTypeItem(languageCode="nb", languageText="DATOTID"), ], ) - BOOLEAN = LanguageStringType( + BOOLEAN = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.DataType.BOOLEAN.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="BOOLSK"), - LanguageStringTypeItem(languageCode="nb", languageText="BOOLSK"), + model.LanguageStringTypeItem(languageCode="nn", languageText="BOOLSK"), + model.LanguageStringTypeItem(languageCode="nb", languageText="BOOLSK"), ], ) @@ -320,49 +295,49 @@ class DataType(LanguageStringsEnum): class IsPersonalData(LanguageStringsEnum): """Is the variable instance personal data and if so, how is it encrypted.""" - NOT_PERSONAL_DATA = LanguageStringType( + NOT_PERSONAL_DATA = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.IsPersonalData.NOT_PERSONAL_DATA.value, ), - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nn", languageText="IKKE PERSONOPPLYSNING", ), - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nb", languageText="IKKE PERSONOPPLYSNING", ), ], ) - PSEUDONYMISED_ENCRYPTED_PERSONAL_DATA = LanguageStringType( + PSEUDONYMISED_ENCRYPTED_PERSONAL_DATA = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.IsPersonalData.PSEUDONYMISED_ENCRYPTED_PERSONAL_DATA.value, ), - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nn", languageText="PSEUDONYMISERT/KRYPTERT PERSONOPPLYSNING", ), - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nb", languageText="PSEUDONYMISERT/KRYPTERT PERSONOPPLYSNING", ), ], ) - NON_PSEUDONYMISED_ENCRYPTED_PERSONAL_DATA = LanguageStringType( + NON_PSEUDONYMISED_ENCRYPTED_PERSONAL_DATA = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.IsPersonalData.NON_PSEUDONYMISED_ENCRYPTED_PERSONAL_DATA.value, ), - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nn", languageText="IKKE PSEUDONYMISERT/KRYPTERT PERSONOPPLYSNING", ), - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nb", languageText="IKKE PSEUDONYMISERT/KRYPTERT PERSONOPPLYSNING", ), @@ -373,54 +348,66 @@ class IsPersonalData(LanguageStringsEnum): class VariableRole(LanguageStringsEnum): """The role of a variable in a dataset.""" - IDENTIFIER = LanguageStringType( + IDENTIFIER = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.VariableRole.IDENTIFIER.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="IDENTIFIKATOR"), - LanguageStringTypeItem(languageCode="nb", languageText="IDENTIFIKATOR"), + model.LanguageStringTypeItem( + languageCode="nn", + languageText="IDENTIFIKATOR", + ), + model.LanguageStringTypeItem( + languageCode="nb", + languageText="IDENTIFIKATOR", + ), ], ) - MEASURE = LanguageStringType( + MEASURE = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", 
languageText=model.VariableRole.MEASURE.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="MÅLEVARIABEL"), - LanguageStringTypeItem(languageCode="nb", languageText="MÅLEVARIABEL"), + model.LanguageStringTypeItem( + languageCode="nn", + languageText="MÅLEVARIABEL", + ), + model.LanguageStringTypeItem( + languageCode="nb", + languageText="MÅLEVARIABEL", + ), ], ) - START_TIME = LanguageStringType( + START_TIME = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.VariableRole.START_TIME.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="STARTTID"), - LanguageStringTypeItem(languageCode="nb", languageText="STARTTID"), + model.LanguageStringTypeItem(languageCode="nn", languageText="STARTTID"), + model.LanguageStringTypeItem(languageCode="nb", languageText="STARTTID"), ], ) - STOP_TIME = LanguageStringType( + STOP_TIME = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.VariableRole.STOP_TIME.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="STOPPTID"), - LanguageStringTypeItem(languageCode="nb", languageText="STOPPTID"), + model.LanguageStringTypeItem(languageCode="nn", languageText="STOPPTID"), + model.LanguageStringTypeItem(languageCode="nb", languageText="STOPPTID"), ], ) - ATTRIBUTE = LanguageStringType( + ATTRIBUTE = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.VariableRole.ATTRIBUTE.value, ), - LanguageStringTypeItem(languageCode="nn", languageText="ATTRIBUTT"), - LanguageStringTypeItem(languageCode="nb", languageText="ATTRIBUTT"), + model.LanguageStringTypeItem(languageCode="nn", languageText="ATTRIBUTT"), + model.LanguageStringTypeItem(languageCode="nb", languageText="ATTRIBUTT"), ], ) @@ -428,49 +415,49 @@ class VariableRole(LanguageStringsEnum): class UseRestriction(LanguageStringsEnum): """Lifecycle status of a dataset.""" - DELETION_ANONYMIZATION = LanguageStringType( + DELETION_ANONYMIZATION = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.UseRestriction.DELETION_ANONYMIZATION.value, ), - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nn", languageText="SLETTING/ANONYMISERING", ), - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nb", languageText="SLETTING/ANONYMISERING", ), ], ) - PROCESS_LIMITATIONS = LanguageStringType( + PROCESS_LIMITATIONS = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.UseRestriction.PROCESS_LIMITATIONS.value, ), - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nn", languageText="BEHANDLINGSBEGRENSNINGER", ), - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nb", languageText="BEHANDLINGSBEGRENSNINGER", ), ], ) - SECONDARY_USE_RESTRICTIONS = LanguageStringType( + SECONDARY_USE_RESTRICTIONS = model.LanguageStringType( [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText=model.UseRestriction.SECONDARY_USE_RESTRICTIONS.value, ), - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nn", languageText="SEKUNDÆRBRUKSRESTRIKSJONER", ), - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nb", languageText="SEKUNDÆRBRUKSRESTRIKSJONER", ), diff --git 
a/src/datadoc/frontend/callbacks/dataset.py b/src/datadoc/frontend/callbacks/dataset.py index bdb656f7..af9abda6 100644 --- a/src/datadoc/frontend/callbacks/dataset.py +++ b/src/datadoc/frontend/callbacks/dataset.py @@ -9,12 +9,12 @@ import arrow from dash import no_update +from dataset import DaplaDatasetPathInfo +from dataset import Datadoc from pydantic import ValidationError from datadoc import config from datadoc import state -from datadoc.backend.core import Datadoc -from datadoc.backend.dapla_dataset_path_info import DaplaDatasetPathInfo from datadoc.constants import CHECK_OBLIGATORY_METADATA_DATASET_MESSAGE from datadoc.constants import MISSING_METADATA_WARNING from datadoc.frontend.callbacks.utils import VALIDATION_ERROR @@ -33,6 +33,8 @@ ) from datadoc.frontend.components.builders import AlertTypes from datadoc.frontend.components.builders import build_ssb_alert +from datadoc.frontend.constants import INVALID_DATE_ORDER +from datadoc.frontend.constants import INVALID_VALUE from datadoc.frontend.fields.display_dataset import DISPLAY_DATASET from datadoc.frontend.fields.display_dataset import ( DROPDOWN_DATASET_METADATA_IDENTIFIERS, @@ -45,13 +47,11 @@ ) from datadoc.frontend.fields.display_dataset import TIMEZONE_AWARE_METADATA_IDENTIFIERS from datadoc.frontend.fields.display_dataset import DatasetIdentifiers -from datadoc.frontend.text import INVALID_DATE_ORDER -from datadoc.frontend.text import INVALID_VALUE from datadoc.utils import METADATA_DOCUMENT_FILE_SUFFIX if TYPE_CHECKING: import dash_bootstrap_components as dbc - from datadoc_model.model import LanguageStringType + from dataset import model logger = logging.getLogger(__name__) @@ -136,17 +136,17 @@ def process_keyword(value: str) -> list[str]: def process_special_cases( - value: MetadataInputTypes | LanguageStringType, + value: MetadataInputTypes | model.LanguageStringType, metadata_identifier: str, language: str | None = None, -) -> MetadataInputTypes | LanguageStringType: +) -> MetadataInputTypes | model.LanguageStringType: """Pre-process metadata where needed. Some types of metadata need processing before being saved to the model. Handle these cases here, other values are returned unchanged. 
""" - updated_value: MetadataInputTypes | LanguageStringType + updated_value: MetadataInputTypes | model.LanguageStringType if metadata_identifier == DatasetIdentifiers.KEYWORD.value and isinstance( value, str, @@ -188,7 +188,7 @@ def process_special_cases( def accept_dataset_metadata_input( - value: MetadataInputTypes | LanguageStringType, + value: MetadataInputTypes | model.LanguageStringType, metadata_identifier: str, language: str | None = None, ) -> tuple[bool, str]: diff --git a/src/datadoc/frontend/callbacks/register_callbacks.py b/src/datadoc/frontend/callbacks/register_callbacks.py index f1e9e1bf..90463f2a 100644 --- a/src/datadoc/frontend/callbacks/register_callbacks.py +++ b/src/datadoc/frontend/callbacks/register_callbacks.py @@ -17,10 +17,10 @@ from dash import ctx from dash import html from dash import no_update +from dataset import ObligatoryDatasetWarning +from dataset import ObligatoryVariableWarning from datadoc import state -from datadoc.backend.model_validation import ObligatoryDatasetWarning -from datadoc.backend.model_validation import ObligatoryVariableWarning from datadoc.frontend.callbacks.dataset import accept_dataset_metadata_date_input from datadoc.frontend.callbacks.dataset import accept_dataset_metadata_input from datadoc.frontend.callbacks.dataset import dataset_control diff --git a/src/datadoc/frontend/callbacks/utils.py b/src/datadoc/frontend/callbacks/utils.py index 67a27588..3b25fb53 100644 --- a/src/datadoc/frontend/callbacks/utils.py +++ b/src/datadoc/frontend/callbacks/utils.py @@ -10,7 +10,7 @@ import arrow import ssb_dash_components as ssb from dash import html -from datadoc_model import model +from dataset import model from datadoc import config from datadoc import state diff --git a/src/datadoc/frontend/callbacks/variables.py b/src/datadoc/frontend/callbacks/variables.py index 9afab330..e0a0be3c 100644 --- a/src/datadoc/frontend/callbacks/variables.py +++ b/src/datadoc/frontend/callbacks/variables.py @@ -18,6 +18,8 @@ from datadoc.frontend.components.builders import build_edit_section from datadoc.frontend.components.builders import build_ssb_accordion from datadoc.frontend.components.builders import build_ssb_alert +from datadoc.frontend.constants import INVALID_DATE_ORDER +from datadoc.frontend.constants import INVALID_VALUE from datadoc.frontend.fields.display_variables import DISPLAY_VARIABLES from datadoc.frontend.fields.display_variables import ( MULTIPLE_LANGUAGE_VARIABLES_METADATA, @@ -28,13 +30,10 @@ ) from datadoc.frontend.fields.display_variables import OPTIONAL_VARIABLES_METADATA from datadoc.frontend.fields.display_variables import VariableIdentifiers -from datadoc.frontend.text import INVALID_DATE_ORDER -from datadoc.frontend.text import INVALID_VALUE if TYPE_CHECKING: import dash_bootstrap_components as dbc - from datadoc_model import model - from datadoc_model.model import LanguageStringType + from dataset import model logger = logging.getLogger(__name__) @@ -77,10 +76,10 @@ def populate_variables_workspace( def handle_multi_language_metadata( metadata_field: str, - new_value: MetadataInputTypes | LanguageStringType, + new_value: MetadataInputTypes | model.LanguageStringType, updated_row_id: str, language: str, -) -> MetadataInputTypes | LanguageStringType: +) -> MetadataInputTypes | model.LanguageStringType: """Handle updates to fields which support multiple languages.""" if new_value is None: # This edge case occurs when the user removes the text in an input field @@ -267,7 +266,7 @@ def variable_identifier_multilanguage( def 
set_variables_values_inherit_dataset_values( - value: MetadataInputTypes | LanguageStringType, + value: MetadataInputTypes | model.LanguageStringType, metadata_identifier: str, ) -> None: """Set variable value based on dataset value.""" @@ -282,7 +281,7 @@ def set_variables_values_inherit_dataset_values( def set_variables_value_multilanguage_inherit_dataset_values( - value: MetadataInputTypes | LanguageStringType, + value: MetadataInputTypes | model.LanguageStringType, metadata_identifier: str, language: str, ) -> None: diff --git a/src/datadoc/frontend/components/builders.py b/src/datadoc/frontend/components/builders.py index a3c915ed..606a6029 100644 --- a/src/datadoc/frontend/components/builders.py +++ b/src/datadoc/frontend/components/builders.py @@ -16,7 +16,7 @@ from datadoc.frontend.fields.display_base import FieldTypes if TYPE_CHECKING: - from datadoc_model import model + from dataset import model class AlertTypes(Enum): diff --git a/src/datadoc/frontend/text.py b/src/datadoc/frontend/constants.py similarity index 73% rename from src/datadoc/frontend/text.py rename to src/datadoc/frontend/constants.py index 00e4940d..f92d72f5 100644 --- a/src/datadoc/frontend/text.py +++ b/src/datadoc/frontend/constants.py @@ -1,2 +1,4 @@ +"""Repository for constant values in Datadoc frontend module.""" + INVALID_VALUE = "Ugyldig verdi angitt!" INVALID_DATE_ORDER = "Verdien for {contains_data_from_display_name} må være en lik eller tidligere dato som {contains_data_until_display_name}" diff --git a/src/datadoc/frontend/fields/display_base.py b/src/datadoc/frontend/fields/display_base.py index 9c290196..84af46ee 100644 --- a/src/datadoc/frontend/fields/display_base.py +++ b/src/datadoc/frontend/fields/display_base.py @@ -11,18 +11,18 @@ import ssb_dash_components as ssb from dash import html +from dataset import enums from datadoc import state -from datadoc.enums import LanguageStringsEnum -from datadoc.enums import SupportedLanguages if TYPE_CHECKING: from collections.abc import Callable from dash.development.base_component import Component - from datadoc_model.model import LanguageStringType + from dataset import model from pydantic import BaseModel + from datadoc.enums import LanguageStringsEnum from datadoc.frontend.callbacks.utils import MetadataInputTypes logger = logging.getLogger(__name__) @@ -39,17 +39,17 @@ METADATA_LANGUAGES = [ { - "supported_language": SupportedLanguages.NORSK_BOKMÅL, + "supported_language": enums.SupportedLanguages.NORSK_BOKMÅL, "language_title": "Bokmål", "language_value": "nb", }, { - "supported_language": SupportedLanguages.NORSK_NYNORSK, + "supported_language": enums.SupportedLanguages.NORSK_NYNORSK, "language_title": "Nynorsk", "language_value": "nn", }, { - "supported_language": SupportedLanguages.ENGLISH, + "supported_language": enums.SupportedLanguages.ENGLISH, "language_title": "English", "language_value": "en", }, @@ -62,7 +62,8 @@ def get_enum_options( """Generate the list of options based on the currently chosen language.""" dropdown_options = [ { - "title": i.get_value_for_language(SupportedLanguages.NORSK_BOKMÅL) or "", + "title": i.get_value_for_language(enums.SupportedLanguages.NORSK_BOKMÅL) + or "", "id": i.name, } for i in enum # type: ignore [attr-defined] @@ -75,7 +76,7 @@ def get_data_source_options() -> list[dict[str, str]]: """Collect the unit type options.""" dropdown_options = [ { - "title": data_sources.get_title(SupportedLanguages.NORSK_BOKMÅL), + "title": data_sources.get_title(enums.SupportedLanguages.NORSK_BOKMÅL), "id": data_sources.code, 
} for data_sources in state.data_sources.classifications @@ -98,8 +99,8 @@ def get_metadata_and_stringify(metadata: BaseModel, identifier: str) -> str | No def _get_string_type_item( - language_strings: LanguageStringType, - current_metadata_language: SupportedLanguages, + language_strings: model.LanguageStringType, + current_metadata_language: enums.SupportedLanguages, ) -> str | None: if language_strings.root is not None: for i in language_strings.root: @@ -111,10 +112,10 @@ def _get_string_type_item( def get_multi_language_metadata_and_stringify( metadata: BaseModel, identifier: str, - language: SupportedLanguages, + language: enums.SupportedLanguages, ) -> str | None: """Get a metadata value supporting multiple languages from the model.""" - value: LanguageStringType | None = getattr(metadata, identifier) + value: model.LanguageStringType | None = getattr(metadata, identifier) if value is None: return "" return _get_string_type_item(value, language) @@ -276,7 +277,7 @@ def render_input_group( value=get_multi_language_metadata_and_stringify( metadata, self.identifier, - SupportedLanguages(i["supported_language"]), + enums.SupportedLanguages(i["supported_language"]), ), debounce=True, id={ @@ -298,7 +299,7 @@ def render_input_group( value=get_multi_language_metadata_and_stringify( metadata, self.identifier, - SupportedLanguages(i["supported_language"]), + enums.SupportedLanguages(i["supported_language"]), ), debounce=True, id={ diff --git a/src/datadoc/frontend/fields/display_dataset.py b/src/datadoc/frontend/fields/display_dataset.py index 330e5ddc..8ed41408 100644 --- a/src/datadoc/frontend/fields/display_dataset.py +++ b/src/datadoc/frontend/fields/display_dataset.py @@ -6,9 +6,14 @@ import logging from enum import Enum -from datadoc import enums +from dataset import enums + from datadoc import state -from datadoc.enums import SupportedLanguages +from datadoc.enums import Assessment +from datadoc.enums import DataSetState +from datadoc.enums import DataSetStatus +from datadoc.enums import TemporalityTypeType +from datadoc.enums import UseRestriction from datadoc.frontend.fields.display_base import DATASET_METADATA_DATE_INPUT from datadoc.frontend.fields.display_base import DATASET_METADATA_MULTILANGUAGE_INPUT from datadoc.frontend.fields.display_base import DROPDOWN_DESELECT_OPTION @@ -30,7 +35,7 @@ def get_statistical_subject_options() -> list[dict[str, str]]: """Generate the list of options for statistical subject.""" dropdown_options = [ { - "title": f"{primary.get_title(SupportedLanguages.NORSK_BOKMÅL)} - {secondary.get_title(SupportedLanguages.NORSK_BOKMÅL)}", + "title": f"{primary.get_title(enums.SupportedLanguages.NORSK_BOKMÅL)} - {secondary.get_title(enums.SupportedLanguages.NORSK_BOKMÅL)}", "id": secondary.subject_code, } for primary in state.statistic_subject_mapping.primary_subjects @@ -44,7 +49,7 @@ def get_unit_type_options() -> list[dict[str, str]]: """Collect the unit type options.""" dropdown_options = [ { - "title": unit_type.get_title(SupportedLanguages.NORSK_BOKMÅL), + "title": unit_type.get_title(enums.SupportedLanguages.NORSK_BOKMÅL), "id": unit_type.code, } for unit_type in state.unit_types.classifications @@ -57,7 +62,7 @@ def get_owner_options() -> list[dict[str, str]]: """Collect the owner options.""" dropdown_options = [ { - "title": f"{option.code} - {option.get_title(SupportedLanguages.NORSK_BOKMÅL)}", + "title": f"{option.code} - {option.get_title(enums.SupportedLanguages.NORSK_BOKMÅL)}", "id": option.code, } for option in 
state.organisational_units.classifications @@ -116,7 +121,7 @@ class DatasetIdentifiers(str, Enum): obligatory=True, options_getter=functools.partial( get_enum_options, - enums.Assessment, + Assessment, ), ), DatasetIdentifiers.DATASET_STATUS: MetadataDropdownField( @@ -125,7 +130,7 @@ class DatasetIdentifiers(str, Enum): description="Oppgi om metadataene er under arbeid (utkast), kan deles internt (intern), kan deles eksternt(ekstern) eller er avsluttet/erstattet (utgått). Det kan være restriksjoner knyttet til deling både internt og eksternt.", options_getter=functools.partial( get_enum_options, - enums.DataSetStatus, + DataSetStatus, ), obligatory=True, ), @@ -136,7 +141,7 @@ class DatasetIdentifiers(str, Enum): obligatory=True, options_getter=functools.partial( get_enum_options, - enums.DataSetState, + DataSetState, ), ), DatasetIdentifiers.NAME: MetadataMultiLanguageField( @@ -194,7 +199,7 @@ class DatasetIdentifiers(str, Enum): description="Temporalitetstypen sier noe om tidsdimensjonen i datasettet. Fast er data med verdi som ikke endres over tid (f.eks. fødselsdato), tverrsnitt er data som er målt på et gitt tidspunkt, akkumulert er data som er samlet over en viss tidsperiode (f.eks. inntekt gjennom et år) og hendelse/forløp registrerer tidspunkt og tidsperiode for ulike hendelser /tilstander, f.eks. (skifte av) bosted.", options_getter=functools.partial( get_enum_options, - enums.TemporalityTypeType, + TemporalityTypeType, ), obligatory=True, ), @@ -290,7 +295,7 @@ class DatasetIdentifiers(str, Enum): description="Oppgi om det er knyttet noen bruksrestriksjoner til datasettet, f.eks. krav om sletting/anonymisering.", options_getter=functools.partial( get_enum_options, - enums.UseRestriction, + UseRestriction, ), ), DatasetIdentifiers.USE_RESTRICTION_DATE: MetadataDateField( diff --git a/src/datadoc/frontend/fields/display_variables.py b/src/datadoc/frontend/fields/display_variables.py index ae68f1e6..110775b1 100644 --- a/src/datadoc/frontend/fields/display_variables.py +++ b/src/datadoc/frontend/fields/display_variables.py @@ -5,8 +5,13 @@ import functools from enum import Enum -from datadoc import enums +from dataset import enums + from datadoc import state +from datadoc.enums import DataType +from datadoc.enums import IsPersonalData +from datadoc.enums import TemporalityTypeType +from datadoc.enums import VariableRole from datadoc.frontend.fields.display_base import VARIABLES_METADATA_DATE_INPUT from datadoc.frontend.fields.display_base import VARIABLES_METADATA_MULTILANGUAGE_INPUT from datadoc.frontend.fields.display_base import FieldTypes @@ -80,7 +85,7 @@ class VariableIdentifiers(str, Enum): obligatory=True, options_getter=functools.partial( get_enum_options, - enums.DataType, + DataType, ), ), VariableIdentifiers.VARIABLE_ROLE: MetadataDropdownField( @@ -90,7 +95,7 @@ class VariableIdentifiers(str, Enum): obligatory=True, options_getter=functools.partial( get_enum_options, - enums.VariableRole, + VariableRole, ), ), VariableIdentifiers.DEFINITION_URI: MetadataInputField( @@ -106,7 +111,7 @@ class VariableIdentifiers(str, Enum): obligatory=True, options_getter=functools.partial( get_enum_options, - enums.IsPersonalData, + IsPersonalData, ), ), VariableIdentifiers.DATA_SOURCE: MetadataDropdownField( @@ -133,7 +138,7 @@ class VariableIdentifiers(str, Enum): description="Temporalitetstypen settes vanligvis på datasettnivå, men dersom datasettet består av variabler med ulike temporalitetstyper, kan den settes på variabelnivå. 
Temporalitet sier noe om tidsdimensjonen i datasettet. Fast er data med verdi som ikke endres over tid (f.eks. fødselsdato), tverrsnitt er data som er målt på et gitt tidspunkt, akkumulert er data som er samlet over en viss tidsperiode (f.eks. inntekt gjennom et år) og hendelse/forløp registrerer tidspunkt og tidsperiode for ulike hendelser /tilstander, f.eks. (skifte av) bosted.", options_getter=functools.partial( get_enum_options, - enums.TemporalityTypeType, + TemporalityTypeType, ), ), VariableIdentifiers.MEASUREMENT_UNIT: MetadataDropdownField( diff --git a/tests/conftest.py b/tests/conftest.py index 06e48030..73dfae60 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -10,23 +10,19 @@ import shutil from datetime import datetime from datetime import timezone -from pathlib import Path from typing import TYPE_CHECKING import pandas as pd import pytest from bs4 import BeautifulSoup from bs4 import ResultSet -from datadoc_model import model +from dataset import Datadoc +from dataset import model +from dataset.code_list import CodeList +from dataset.statistic_subject_mapping import StatisticSubjectMapping +from dataset.user_info import TestUserInfo from datadoc import state -from datadoc.backend.code_list import CodeList -from datadoc.backend.core import Datadoc -from datadoc.backend.statistic_subject_mapping import StatisticSubjectMapping -from datadoc.backend.user_info import TestUserInfo -from tests.backend.test_statistic_subject_mapping import ( - STATISTICAL_SUBJECT_STRUCTURE_DIR, -) from .utils import TEST_DATASETS_DIRECTORY from .utils import TEST_EXISTING_METADATA_DIRECTORY @@ -37,7 +33,15 @@ from .utils import TEST_PARQUET_FILEPATH from .utils import TEST_RESOURCES_DIRECTORY +if TYPE_CHECKING: + from pathlib import Path + + from pytest_mock import MockerFixture + + +DATADOC_METADATA_MODULE = "dataset" CODE_LIST_DIR = "code_list" +STATISTICAL_SUBJECT_STRUCTURE_DIR = "statistical_subject_structure" if TYPE_CHECKING: from pytest_mock import MockerFixture diff --git a/tests/frontend/callbacks/test_dataset_callbacks.py b/tests/frontend/callbacks/test_dataset_callbacks.py index 6999d0b3..4d84fab2 100644 --- a/tests/frontend/callbacks/test_dataset_callbacks.py +++ b/tests/frontend/callbacks/test_dataset_callbacks.py @@ -11,26 +11,27 @@ import dash import dash_bootstrap_components as dbc import pytest -from datadoc_model import model +from dataset import ObligatoryDatasetWarning +from dataset import model from datadoc import enums from datadoc import state -from datadoc.backend.model_validation import ObligatoryDatasetWarning from datadoc.frontend.callbacks.dataset import accept_dataset_metadata_date_input from datadoc.frontend.callbacks.dataset import accept_dataset_metadata_input from datadoc.frontend.callbacks.dataset import dataset_control from datadoc.frontend.callbacks.dataset import open_dataset_handling from datadoc.frontend.callbacks.dataset import process_special_cases +from datadoc.frontend.constants import INVALID_DATE_ORDER +from datadoc.frontend.constants import INVALID_VALUE from datadoc.frontend.fields.display_dataset import DISPLAY_DATASET from datadoc.frontend.fields.display_dataset import ( MULTIPLE_LANGUAGE_DATASET_IDENTIFIERS, ) from datadoc.frontend.fields.display_dataset import DatasetIdentifiers -from datadoc.frontend.text import INVALID_DATE_ORDER -from datadoc.frontend.text import INVALID_VALUE if TYPE_CHECKING: - from datadoc.backend.core import Datadoc + from dataset import Datadoc + from datadoc.frontend.callbacks.utils import MetadataInputTypes 
DATASET_CALLBACKS_MODULE = "datadoc.frontend.callbacks.dataset" @@ -73,9 +74,9 @@ def file_path_without_dates(): ( DatasetIdentifiers.NAME, "Dataset name", - enums.LanguageStringType( + model.LanguageStringType( [ - enums.LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nb", languageText="Dataset name", ), @@ -85,9 +86,9 @@ def file_path_without_dates(): ( DatasetIdentifiers.DESCRIPTION, "Dataset description", - enums.LanguageStringType( + model.LanguageStringType( [ - enums.LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nb", languageText="Dataset description", ), @@ -102,9 +103,9 @@ def file_path_without_dates(): ( DatasetIdentifiers.POPULATION_DESCRIPTION, "Population description", - enums.LanguageStringType( + model.LanguageStringType( [ - enums.LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nb", languageText="Population description", ), @@ -115,9 +116,9 @@ def file_path_without_dates(): ( DatasetIdentifiers.VERSION_DESCRIPTION, "Version description", - enums.LanguageStringType( + model.LanguageStringType( [ - enums.LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nb", languageText="Version description", ), @@ -147,17 +148,17 @@ def file_path_without_dates(): ( DatasetIdentifiers.SPATIAL_COVERAGE_DESCRIPTION, "Spatial coverage description", - enums.LanguageStringType( + model.LanguageStringType( [ - enums.LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nb", languageText="Spatial coverage description", ), - enums.LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nn", languageText="Noreg", ), - enums.LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="en", languageText="Norway", ), diff --git a/tests/frontend/callbacks/test_variables_callbacks.py b/tests/frontend/callbacks/test_variables_callbacks.py index 36e0a035..979b9274 100644 --- a/tests/frontend/callbacks/test_variables_callbacks.py +++ b/tests/frontend/callbacks/test_variables_callbacks.py @@ -10,13 +10,12 @@ import arrow import dash_bootstrap_components as dbc import pytest -from datadoc_model import model -from datadoc_model.model import LanguageStringTypeItem +from dataset import ObligatoryVariableWarning +from dataset import model from pydantic_core import Url from datadoc import enums from datadoc import state -from datadoc.backend.model_validation import ObligatoryVariableWarning from datadoc.frontend.callbacks.variables import accept_variable_metadata_date_input from datadoc.frontend.callbacks.variables import accept_variable_metadata_input from datadoc.frontend.callbacks.variables import populate_variables_workspace @@ -30,16 +29,17 @@ set_variables_values_inherit_dataset_values, ) from datadoc.frontend.callbacks.variables import variables_control +from datadoc.frontend.constants import INVALID_DATE_ORDER +from datadoc.frontend.constants import INVALID_VALUE from datadoc.frontend.fields.display_base import get_metadata_and_stringify from datadoc.frontend.fields.display_base import get_standard_metadata from datadoc.frontend.fields.display_dataset import DatasetIdentifiers from datadoc.frontend.fields.display_variables import DISPLAY_VARIABLES from datadoc.frontend.fields.display_variables import VariableIdentifiers -from datadoc.frontend.text import INVALID_DATE_ORDER -from datadoc.frontend.text import INVALID_VALUE if TYPE_CHECKING: - from datadoc.backend.core import Datadoc + from dataset import Datadoc + from datadoc.frontend.callbacks.utils import MetadataInputTypes @@ 
-49,9 +49,9 @@ ( VariableIdentifiers.NAME, "Variable name", - enums.LanguageStringType( + model.LanguageStringType( [ - enums.LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nb", languageText="Variable name", ), @@ -86,9 +86,9 @@ ( VariableIdentifiers.POPULATION_DESCRIPTION, "Population description", - enums.LanguageStringType( + model.LanguageStringType( [ - enums.LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nb", languageText="Population description", ), @@ -98,9 +98,9 @@ ( VariableIdentifiers.COMMENT, "Comment", - enums.LanguageStringType( + model.LanguageStringType( [ - enums.LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nb", languageText="Comment", ), @@ -130,9 +130,9 @@ ( VariableIdentifiers.INVALID_VALUE_DESCRIPTION, "Invalid value", - enums.LanguageStringType( + model.LanguageStringType( [ - enums.LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nb", languageText="Invalid value", ), @@ -452,7 +452,7 @@ def test_variables_values_multilanguage_inherit_dataset_values( state.metadata = metadata dataset_population_description = "Personer bosatt i Norge" dataset_population_description_language_item = [ - LanguageStringTypeItem( + model.LanguageStringTypeItem( languageCode="nb", languageText="Personer bosatt i Norge", ), @@ -482,7 +482,10 @@ def test_variables_values_multilanguage_can_be_changed_after_inherit_dataset_val state.metadata = metadata dataset_population_description = "Persons in Norway" dataset_population_description_language_item = [ - LanguageStringTypeItem(languageCode="en", languageText="Persons in Norway"), + model.LanguageStringTypeItem( + languageCode="en", + languageText="Persons in Norway", + ), ] dataset_identifier = DatasetIdentifiers.POPULATION_DESCRIPTION variables_identifier = VariableIdentifiers.POPULATION_DESCRIPTION @@ -503,7 +506,10 @@ def test_variables_values_multilanguage_can_be_changed_after_inherit_dataset_val variables_identifier, ) variables_language_item = [ - LanguageStringTypeItem(languageCode="en", languageText="Persons in Sweden"), + model.LanguageStringTypeItem( + languageCode="en", + languageText="Persons in Sweden", + ), ] setattr( state.metadata.variables_lookup["pers_id"], @@ -598,7 +604,7 @@ def test_variables_metadata_control_dont_return_alert(metadata: Datadoc): state.metadata.variables_lookup[val.short_name], VariableIdentifiers.NAME, model.LanguageStringType( - [LanguageStringTypeItem(languageCode="nb", languageText="Test")], + [model.LanguageStringTypeItem(languageCode="nb", languageText="Test")], ), ) setattr( diff --git a/tests/frontend/components/test_build_dataset_edit_section.py b/tests/frontend/components/test_build_dataset_edit_section.py index 69d2fe0b..169ea272 100644 --- a/tests/frontend/components/test_build_dataset_edit_section.py +++ b/tests/frontend/components/test_build_dataset_edit_section.py @@ -4,7 +4,7 @@ import pytest import ssb_dash_components as ssb # type: ignore[import-untyped] from dash import html -from datadoc_model import model +from dataset import model from datadoc.frontend.components.builders import build_dataset_edit_section from datadoc.frontend.fields.display_base import FieldTypes @@ -22,9 +22,12 @@ for m in DISPLAY_DATASET.values() if m.obligatory and m.editable - and m.identifier != DatasetIdentifiers.UNIT_TYPE.value - and m.identifier != DatasetIdentifiers.SUBJECT_FIELD.value - and m.identifier != DatasetIdentifiers.OWNER.value + and m.identifier + not in ( + DatasetIdentifiers.UNIT_TYPE.value, + 
DatasetIdentifiers.SUBJECT_FIELD.value, + DatasetIdentifiers.OWNER.value, + ) ] INPUT_DATA_BUILD_DATASET_SECTION = [ diff --git a/tests/frontend/components/test_build_edit_section.py b/tests/frontend/components/test_build_edit_section.py index 7b9cbb46..f5d6449d 100644 --- a/tests/frontend/components/test_build_edit_section.py +++ b/tests/frontend/components/test_build_edit_section.py @@ -4,7 +4,7 @@ import pytest import ssb_dash_components as ssb # type: ignore[import-untyped] from dash import html -from datadoc_model import model +from dataset import model from datadoc.frontend.components.builders import build_edit_section from datadoc.frontend.fields.display_variables import OBLIGATORY_VARIABLES_METADATA diff --git a/tests/frontend/components/test_build_input_section.py b/tests/frontend/components/test_build_input_section.py index f7474348..199e6bac 100644 --- a/tests/frontend/components/test_build_input_section.py +++ b/tests/frontend/components/test_build_input_section.py @@ -3,7 +3,7 @@ import dash_bootstrap_components as dbc import pytest import ssb_dash_components as ssb # type: ignore[import-untyped] -from datadoc_model import model +from dataset import model from datadoc.frontend.components.builders import build_input_field_section from datadoc.frontend.fields.display_base import MetadataCheckboxField diff --git a/tests/frontend/fields/test_display_dataset.py b/tests/frontend/fields/test_display_dataset.py index 5a6b8249..6021291e 100644 --- a/tests/frontend/fields/test_display_dataset.py +++ b/tests/frontend/fields/test_display_dataset.py @@ -4,10 +4,8 @@ from datadoc.frontend.fields.display_base import DROPDOWN_DESELECT_OPTION from datadoc.frontend.fields.display_dataset import get_statistical_subject_options from datadoc.frontend.fields.display_dataset import get_unit_type_options -from tests.backend.test_code_list import CODE_LIST_DIR -from tests.backend.test_statistic_subject_mapping import ( - STATISTICAL_SUBJECT_STRUCTURE_DIR, -) +from tests.conftest import CODE_LIST_DIR +from tests.conftest import STATISTICAL_SUBJECT_STRUCTURE_DIR from tests.utils import TEST_RESOURCES_DIRECTORY diff --git a/tests/test_model.py b/tests/test_model.py index eab4df5b..74f396fe 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -1,7 +1,6 @@ """Verify that we are in sync with the Model.""" -from datadoc_model.model import Dataset -from datadoc_model.model import Variable +from dataset import model from datadoc.frontend.fields.display_dataset import DISPLAY_DATASET from datadoc.frontend.fields.display_dataset import DatasetIdentifiers @@ -12,7 +11,7 @@ def test_dataset_metadata_definition_parity(): """The metadata fields are currently defined in multiple places for technical reasons. We want these to always be exactly identical.""" datadoc_values = sorted([i.value for i in DatasetIdentifiers]) - model_values = sorted(Dataset().model_dump().keys()) + model_values = sorted(model.Dataset().model_dump().keys()) # TODO @Jorgen-5: Fields that are currently not supported by datadoc # noqa: TD003 model_values.remove("custom_type") @@ -24,7 +23,7 @@ def test_dataset_metadata_definition_parity(): def test_variables_metadata_definition_parity(): """The metadata fields are currently defined in multiple places for technical reasons. 
We want these to always be exactly identical.""" datadoc_values = sorted([i.value for i in VariableIdentifiers]) - model_values = sorted(Variable().model_dump().keys()) + model_values = sorted(model.Variable().model_dump().keys()) # TODO @Jorgen-5: Fields that are currently not supported by datadoc # noqa: TD003 model_values.remove("custom_type")
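A minimal sketch (not part of the patch) of the import style this diff migrates to, assuming the dataset package re-exports the names exactly as the hunks above use them (Datadoc, model, enums); the example field values below are purely illustrative.

# Before this diff (removed imports):
#   from datadoc.backend.core import Datadoc
#   from datadoc_model.model import LanguageStringType, LanguageStringTypeItem
#   from datadoc.enums import SupportedLanguages
#
# After this diff, everything comes from the dataset package:
from dataset import enums
from dataset import model

# Multi-language strings are built the same way, but via dataset.model:
dataset_name = model.LanguageStringType(
    [
        model.LanguageStringTypeItem(languageCode="nb", languageText="Datasettnavn"),
        model.LanguageStringTypeItem(languageCode="en", languageText="Dataset name"),
    ],
)

# Language selection now goes through dataset.enums instead of datadoc.enums:
default_language = enums.SupportedLanguages.NORSK_BOKMÅL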