Commit

Use ssb-datadoc-model v3.0.0
mmwinther committed Oct 11, 2023
1 parent 3d77393 commit 99bf67f
Showing 12 changed files with 1,539 additions and 1,232 deletions.
18 changes: 13 additions & 5 deletions datadoc/backend/datadoc_metadata.py
@@ -37,10 +37,18 @@

# These don't vary at runtime so we calculate them as constants here
NUM_OBLIGATORY_DATASET_FIELDS = len(
-    [k for k in Model.DataDocDataSet().dict() if k in OBLIGATORY_DATASET_METADATA],
+    [
+        k
+        for k in Model.DataDocDataSet().model_dump()
+        if k in OBLIGATORY_DATASET_METADATA
+    ],
)
NUM_OBLIGATORY_VARIABLES_FIELDS = len(
-    [k for k in Model.DataDocVariable().dict() if k in OBLIGATORY_VARIABLES_METADATA],
+    [
+        k
+        for k in Model.DataDocVariable().model_dump()
+        if k in OBLIGATORY_VARIABLES_METADATA
+    ],
)

METADATA_DOCUMENT_FILE_SUFFIX = "__DOC.json"
@@ -210,7 +218,7 @@ def write_metadata_document(self: t.Self @ DataDocMetadata) -> None:
self.meta.dataset.metadata_created_by = self.current_user
self.meta.dataset.metadata_last_updated_date = timestamp
self.meta.dataset.metadata_last_updated_by = self.current_user
-        self.metadata_document.write_text(self.meta.json(indent=4, sort_keys=False))
+        self.metadata_document.write_text(self.meta.model_dump_json(indent=4))
logger.info("Saved metadata document %s", self.metadata_document.location)

@property
@@ -225,7 +233,7 @@ def percent_complete(self: t.Self @ DataDocMetadata) -> int:
num_set_fields = len(
[
k
-                for k, v in self.meta.dataset.dict().items()
+                for k, v in self.meta.dataset.model_dump().items()
if k in OBLIGATORY_DATASET_METADATA and v is not None
],
)
@@ -235,7 +243,7 @@ def percent_complete(self: t.Self @ DataDocMetadata) -> int:
num_set_fields += len(
[
k
-                    for k, v in variable.dict().items()
+                    for k, v in variable.model_dump().items()
if k in OBLIGATORY_VARIABLES_METADATA and v is not None
],
)
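For context, the edits in this file track the serialization API renamed in Pydantic v2, which ssb-datadoc-model v3.0.0 appears to build on: dict() becomes model_dump() and json() becomes model_dump_json(). A minimal sketch of the rename, using an illustrative model rather than one from this repository:

    from pydantic import BaseModel


    class Example(BaseModel):  # illustrative model, not part of datadoc
        name: str = "demo"
        version: str = "1"


    e = Example()
    # Pydantic v1 spelling (deprecated in v2): e.dict(), e.json(indent=4, sort_keys=False)
    # Pydantic v2 spelling used throughout this commit:
    print(e.model_dump())               # {'name': 'demo', 'version': '1'}
    print(e.model_dump_json(indent=4))  # JSON string; model_dump_json() takes no
                                        # sort_keys argument, hence it is dropped above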
9 changes: 7 additions & 2 deletions datadoc/backend/model_backwards_compatibility.py
@@ -72,15 +72,20 @@ def handle_version_0_1_1(supplied_metadata: dict) -> dict:
supplied_metadata["dataset"][new_key] = supplied_metadata["dataset"].pop(
old_key,
)

+    # Replace empty strings with None, empty strings are not valid for LanguageStrings values
+    supplied_metadata["dataset"] = {
+        k: None if v == "" else v for k, v in supplied_metadata["dataset"].items()
+    }
return supplied_metadata


# Register all the supported versions and their handlers
BackwardsCompatibleVersion(version="0.1.1", handler=handle_version_0_1_1)
BackwardsCompatibleVersion(
version="1",
version="1", # Some documents exist with incorrect version specification
handler=handle_version_0_1_1,
) # Some documents exist with incorrect version specification
)


def upgrade_metadata(fresh_metadata: dict, current_model_version: str) -> dict:
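The normalisation step added to handle_version_0_1_1 is easiest to see on a sample document; the dict below is illustrative, not a real Datadoc metadata file:

    # Empty strings are replaced with None because "" is not a valid LanguageStrings value.
    sample = {"dataset": {"name": "", "description": "population by region", "version": None}}

    sample["dataset"] = {
        k: None if v == "" else v for k, v in sample["dataset"].items()
    }

    assert sample["dataset"] == {
        "name": None,
        "description": "population by region",
        "version": None,
    }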
5 changes: 4 additions & 1 deletion datadoc/frontend/fields/display_base.py
@@ -45,7 +45,10 @@ def kwargs_factory() -> dict[str, t.Any]:

def get_standard_metadata(metadata: BaseModel, identifier: str) -> MetadataInputTypes:
"""Get a metadata value from the model."""
-    return metadata.dict()[identifier]
+    value = metadata.model_dump()[identifier]
+    if value is None:
+        return None
+    return str(value)


def get_metadata_and_stringify(metadata: BaseModel, identifier: str) -> str:
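The added str() coercion in get_standard_metadata is not explained in the diff, but one relevant Pydantic v2 detail is that model_dump() defaults to mode="python", so enums, dates and similar values come back as Python objects rather than display-ready strings. A small illustration with assumed field types, not the real datadoc models:

    import datetime
    import enum

    from pydantic import BaseModel


    class Status(enum.Enum):  # illustrative enum
        DRAFT = "DRAFT"


    class Example(BaseModel):  # illustrative model
        status: Status = Status.DRAFT
        created: datetime.date = datetime.date(2023, 10, 11)


    dumped = Example().model_dump()
    print(dumped)                  # {'status': <Status.DRAFT: 'DRAFT'>, 'created': datetime.date(2023, 10, 11)}
    print(str(dumped["created"]))  # '2023-10-11' -- a plain string a UI input can display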
6 changes: 4 additions & 2 deletions datadoc/frontend/fields/display_dataset.py
@@ -3,6 +3,7 @@
from __future__ import annotations

import logging
+import typing
from enum import Enum

from dash import dcc
@@ -249,7 +250,8 @@ class DatasetIdentifiers(str, Enum):
m for m in DISPLAYED_DATASET_METADATA if m.component == dcc.Dropdown
]

+types = typing.get_type_hints(Model.DataDocDataSet)

DISPLAYED_DROPDOWN_DATASET_ENUMS = [
-    Model.DataDocDataSet.__fields__[m.identifier].type_
-    for m in DISPLAYED_DROPDOWN_DATASET_METADATA
+    typing.get_args(types[m.identifier])[0] for m in DISPLAYED_DROPDOWN_DATASET_METADATA
]
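Pydantic v2's FieldInfo no longer exposes the type_ attribute that v1's ModelField had, so the enum type behind an Optional[...] annotation is now recovered from the class's type hints instead. A standalone sketch of the same lookup, with an illustrative model and enum:

    import enum
    import typing

    from pydantic import BaseModel


    class Role(enum.Enum):  # illustrative enum
        ADMIN = "ADMIN"


    class DemoModel(BaseModel):  # illustrative model
        role: typing.Optional[Role] = None


    types = typing.get_type_hints(DemoModel)
    # Optional[Role] is Union[Role, None]; get_args() yields (Role, NoneType),
    # so index 0 is the concrete type -- the same trick as the change above.
    assert typing.get_args(types["role"])[0] is Role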
5 changes: 4 additions & 1 deletion datadoc/frontend/fields/display_variables.py
@@ -2,6 +2,7 @@

from __future__ import annotations

+import typing
from enum import Enum

from datadoc_model import Model
@@ -155,8 +156,10 @@ class VariableIdentifiers(str, Enum):

DISPLAYED_DROPDOWN_VARIABLES_TYPES = []

+types = typing.get_type_hints(Model.DataDocVariable)

for m in DISPLAY_VARIABLES.values():
if m.presentation == "dropdown":
-        field_type = Model.DataDocVariable.__fields__[m.identifier].type_
+        field_type = typing.get_args(types[m.identifier])[0]
if issubclass(field_type, LanguageStringsEnum) or field_type is bool:
DISPLAYED_DROPDOWN_VARIABLES_TYPES.append(field_type)
2 changes: 1 addition & 1 deletion datadoc/tests/test_callbacks.py
@@ -218,7 +218,7 @@ def test_update_variable_table_dropdown_options_for_language():
options = update_variable_table_dropdown_options_for_language(
SupportedLanguages.NORSK_BOKMÅL,
)
-    assert all(k in DataDocVariable.__fields__ for k in options)
+    assert all(k in DataDocVariable.model_fields for k in options)
assert all(list(v.keys()) == ["options"] for v in options.values())
try:
assert all(
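Same theme in the tests: Pydantic v2 renames the class-level field registry from __fields__ to model_fields (the old name is kept only as a backwards-compatibility alias). A tiny illustration with a made-up model:

    from pydantic import BaseModel


    class Example(BaseModel):  # illustrative model
        short_name: str = ""
        data_type: int = 0


    # v1: Example.__fields__   ->   v2: Example.model_fields
    assert set(Example.model_fields) == {"short_name", "data_type"}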
2 changes: 1 addition & 1 deletion datadoc/tests/test_datadoc_metadata.py
@@ -75,7 +75,7 @@ def test_metadata_document_percent_complete(metadata: DataDocMetadata):
variable_2 = DataDocVariable(data_type=Enums.Datatype.INTEGER)
document = MetadataDocument(
percentage_complete=0,
-        document_version=1,
+        document_version="1.0.0",
dataset=dataset,
variables=[variable_1, variable_2],
)
6 changes: 4 additions & 2 deletions datadoc/tests/test_model.py
@@ -11,13 +11,15 @@

def test_dataset_metadata_definition_parity():
"""The metadata fields are currently defined in multiple places for technical reasons. We want these to always be exactly identical."""
-    assert [i.value for i in DatasetIdentifiers] == list(DataDocDataSet().dict().keys())
+    assert [i.value for i in DatasetIdentifiers] == list(
+        DataDocDataSet().model_dump().keys(),
+    )
assert list(DatasetIdentifiers) == list(DISPLAY_DATASET.keys())


def test_variables_metadata_definition_parity():
"""The metadata fields are currently defined in multiple places for technical reasons. We want these to always be exactly identical."""
assert [i.value for i in VariableIdentifiers] == list(
-        DataDocVariable().dict().keys(),
+        DataDocVariable().model_dump().keys(),
)
assert list(VariableIdentifiers) == list(DISPLAY_VARIABLES.keys())
4 changes: 3 additions & 1 deletion datadoc/tests/test_model_backwards_compatibility.py
@@ -52,7 +52,9 @@ def test_backwards_compatibility(
in_file_values = [
v for v in file_metadata["dataset"].values() if v not in ["", None]
]
-    read_in_values = json.loads(metadata.meta.dataset.json(exclude_none=True)).values()
+    read_in_values = json.loads(
+        metadata.meta.dataset.model_dump_json(exclude_none=True),
+    ).values()

missing_values = [v for v in in_file_values if v not in read_in_values]
if missing_values:
2 changes: 1 addition & 1 deletion datadoc/utils.py
@@ -31,7 +31,7 @@ def get_display_values(
return_dict = {}
for field_name, value in variable:
if isinstance(value, Model.LanguageStrings):
-            return_dict[field_name] = value.dict()[current_language.value]
+            return_dict[field_name] = value.model_dump()[current_language.value]
else:
return_dict[field_name] = value
return return_dict