Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add test for DAPLA naming convention #269

Merged
merged 5 commits into from
Apr 5, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions src/datadoc/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
from datadoc.enums import SupportedLanguages
from datadoc.frontend.callbacks.register_callbacks import register_callbacks
from datadoc.frontend.components.alerts import dataset_validation_error
from datadoc.frontend.components.alerts import naming_convention_warning
from datadoc.frontend.components.alerts import opened_dataset_error
from datadoc.frontend.components.alerts import opened_dataset_success
from datadoc.frontend.components.alerts import saved_metadata_success
Expand Down Expand Up @@ -65,6 +66,7 @@ def build_app(app: type[Dash]) -> Dash:
opened_dataset_error,
saved_metadata_success,
opened_dataset_success,
naming_convention_warning,
dbc.Tabs(
id="tabs",
class_name="ssb-tabs",
Expand Down
5 changes: 5 additions & 0 deletions src/datadoc/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -133,3 +133,8 @@ def get_unit_code() -> int | None:
def get_organisational_unit_code() -> int | None:
"""The code for the organisational units code list in Klass."""
return int(_get_config_item("DATADOC_ORGANISATIONAL_UNIT_CODE") or 83)


def get_dapla_manual_naming_standard_url() -> str | None:
    """Return the URL to the naming standard page in the DAPLA manual.

    Reads the ``DAPLA_MANUAL_NAMING_STANDARD_URL`` config item; returns
    ``None`` when it is not configured.
    """
    url = _get_config_item("DAPLA_MANUAL_NAMING_STANDARD_URL")
    return url
15 changes: 10 additions & 5 deletions src/datadoc/frontend/callbacks/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
from pydantic import ValidationError

from datadoc import state
from datadoc.backend.dapla_dataset_path_info import DaplaDatasetPathInfo
from datadoc.backend.datadoc_metadata import DataDocMetadata
from datadoc.enums import (
SupportedLanguages, # noqa: TCH001 import is needed for docs build
Expand Down Expand Up @@ -53,32 +54,36 @@ def open_file(file_path: str | None = None) -> DataDocMetadata:
def open_dataset_handling(
    n_clicks: int,
    file_path: str,
) -> tuple[bool, bool, bool, str, str]:
    """Handle errors and other logic around opening a dataset file.

    Returns a tuple matching the callback outputs:
    (show_success_alert, show_error_alert, show_naming_warning,
    error_explanation, language_value). The language value is returned so
    the caller can force a reload of the displayed metadata.
    """
    if file_path:
        # Strip accidental surrounding whitespace from pasted paths.
        file_path = file_path.strip()

    try:
        state.metadata = open_file(file_path)
    except FileNotFoundError:
        logger.exception("File %s not found", str(file_path))
        return (
            False,
            True,
            False,
            f"Filen '{file_path}' finnes ikke.",
            state.current_metadata_language.value,
        )
    except Exception as e:  # noqa: BLE001
        # Surface any other failure to the user as an error alert.
        return (
            False,
            True,
            False,
            "\n".join(traceback.format_exception_only(type(e), e)),
            state.current_metadata_language.value,
        )
    if n_clicks and n_clicks > 0:
        # The dataset opened successfully; additionally warn (without
        # blocking) when the path does not follow the DAPLA naming standard.
        dapla_dataset_path_info = DaplaDatasetPathInfo(file_path)
        if not dapla_dataset_path_info.path_complies_with_naming_standard():
            return (True, False, True, "", state.current_metadata_language.value)
        return True, False, False, "", state.current_metadata_language.value
    # No click registered: show no alerts at all.
    return False, False, False, "", state.current_metadata_language.value


def process_keyword(value: str) -> list[str]:
Expand Down
3 changes: 2 additions & 1 deletion src/datadoc/frontend/callbacks/register_callbacks.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,7 @@ def callback_accept_dataset_metadata_input(
@app.callback(
Output("opened-dataset-success", "is_open"),
Output("opened-dataset-error", "is_open"),
Output("opened-dataset_warning", "is_open"),
Output("opened-dataset-error-explanation", "children"),
Output("language-dropdown", "value"), # Used to force reload of metadata
Input("open-button", "n_clicks"),
Expand All @@ -118,7 +119,7 @@ def callback_accept_dataset_metadata_input(
def callback_open_dataset(
n_clicks: int,
dataset_path: str,
) -> tuple[bool, bool, str, str]:
) -> tuple[bool, bool, bool, str, str]:
"""Open a dataset.

Shows an alert on success or failure.
Expand Down
15 changes: 12 additions & 3 deletions src/datadoc/frontend/components/alerts.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,25 +2,26 @@

from __future__ import annotations

from datadoc.config import get_dapla_manual_naming_standard_url
from datadoc.frontend.components.builders import AlertTypes
from datadoc.frontend.components.builders import build_ssb_alert

# Error alert shown when dataset-level metadata fails validation.
dataset_validation_error = build_ssb_alert(
    AlertTypes.ERROR,
    "dataset-validation-error",
    "Validering feilet",
    "dataset-validation-explanation",
)

# Error alert shown when variable-level metadata fails validation.
variables_validation_error = build_ssb_alert(
    AlertTypes.ERROR,
    "variables-validation-error",
    "Validering feilet",
    "variables-validation-explanation",
)

opened_dataset_error = build_ssb_alert(
AlertTypes.WARNING,
AlertTypes.ERROR,
"opened-dataset-error",
"Kunne ikke åpne datasettet",
"opened-dataset-error-explanation",
Expand All @@ -39,3 +40,11 @@
"Åpnet datasett",
"opened-dataset-success-explanation",
)

# Warning shown when an opened dataset's path does not follow the DAPLA
# naming standard; links to the naming-standard page in the DAPLA manual.
# NOTE(review): the component id mixes '-' and '_' ("opened-dataset_warning");
# it must stay byte-identical to the matching Output id registered in
# register_callbacks.py, so do not "fix" it here alone.
naming_convention_warning = build_ssb_alert(
    AlertTypes.WARNING,
    "opened-dataset_warning",
    "Filen følger ikke navnestandard. Vennligst se",  # fixed typo: "Vennlist" -> "Vennligst"
    "opened-dataset-warning-explanation",
    link=get_dapla_manual_naming_standard_url(),
)
9 changes: 8 additions & 1 deletion src/datadoc/frontend/components/builders.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ class AlertTypes(Enum):

SUCCESS = auto()
WARNING = auto()
ERROR = auto()


@dataclass
Expand All @@ -42,9 +43,13 @@ def get_type(alert_type: AlertTypes) -> AlertType:


ALERT_TYPES = {
AlertTypes.ERROR: AlertType(
alert_class_name="ssb-dialog error",
color="danger",
),
AlertTypes.WARNING: AlertType(
alert_class_name="ssb-dialog warning",
color="danger",
color="warning",
),
AlertTypes.SUCCESS: AlertType(
alert_class_name="ssb-dialog",
Expand Down Expand Up @@ -73,6 +78,7 @@ def build_ssb_alert( # noqa: PLR0913 not immediately obvious how to improve thi
message: str | None = None,
*,
start_open: bool = False,
link: str | None = None,
) -> dbc.Alert:
"""Make a Dash Alert according to SSBs Design System."""
alert = AlertType.get_type(alert_type)
Expand All @@ -91,6 +97,7 @@ def build_ssb_alert( # noqa: PLR0913 not immediately obvious how to improve thi
id=content_identifier,
children=message,
),
html.A(link, href=link, target="_blank"),
],
style={"width": "70%"},
)
Expand Down
40 changes: 34 additions & 6 deletions tests/frontend/callbacks/test_dataset_callbacks.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,11 @@ def file_path():
return "valid/path/to/file.json"


@pytest.fixture()
def file_path_without_dates():
    """A dataset path whose file name carries no date components."""
    path_missing_dates = "valid/path/to/person_data_v1.parquet"
    return path_missing_dates


@pytest.mark.parametrize(
("metadata_identifier", "provided_value", "expected_model_value"),
[
Expand Down Expand Up @@ -278,13 +283,13 @@ def test_open_dataset_handling_normal(
):
state.current_metadata_language = SupportedLanguages.ENGLISH

opened, show_error, error_msg, language = open_dataset_handling(
opened, show_error, naming_standard, error_msg, language = open_dataset_handling(
n_clicks_1,
file_path,
)

assert opened
assert not show_error
assert naming_standard
assert error_msg == ""
assert language == "en"

Expand All @@ -298,12 +303,13 @@ def test_open_dataset_handling_file_not_found(
state.current_metadata_language = SupportedLanguages.ENGLISH
open_file_mock.side_effect = FileNotFoundError()

opened, show_error, error_msg, language = open_dataset_handling(
opened, show_error, naming_standard, error_msg, language = open_dataset_handling(
n_clicks_1,
file_path,
)
assert not opened
assert show_error
assert not naming_standard
assert error_msg.startswith(f"Filen '{file_path}' finnes ikke.")
assert language == "en"

Expand All @@ -317,12 +323,13 @@ def test_open_dataset_handling_general_exception(
state.current_metadata_language = SupportedLanguages.ENGLISH
open_file_mock.side_effect = ValueError()

opened, show_error, error_msg, language = open_dataset_handling(
opened, show_error, naming_standard, error_msg, language = open_dataset_handling(
n_clicks_1,
file_path,
)
assert not opened
assert show_error
assert not naming_standard
assert error_msg.startswith("ValueError")
assert language == "en"

Expand All @@ -333,10 +340,31 @@ def test_open_dataset_handling_no_click(
file_path: str,
):
state.current_metadata_language = SupportedLanguages.ENGLISH
opened, show_error, error_msg, language = open_dataset_handling(0, file_path)

opened, show_error, naming_standard, error_msg, language = open_dataset_handling(
0,
file_path,
)
assert not opened
assert not show_error
assert not naming_standard
assert error_msg == ""
assert language == "en"


@patch(f"{DATASET_CALLBACKS_MODULE}.open_file")
def test_open_dataset_handling_naming_standard(
    open_file_mock: Mock,  # noqa: ARG001
    n_clicks_1: int,
    file_path_without_dates: str,
):
    """A dateless path opens fine but triggers the naming-standard warning."""
    state.current_metadata_language = SupportedLanguages.ENGLISH

    result = open_dataset_handling(n_clicks_1, file_path_without_dates)
    opened, show_error, naming_standard, error_msg, language = result

    assert opened is True
    assert not show_error
    assert naming_standard
    assert error_msg == ""
    assert language == "en"

Expand Down