diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 8db85e7c..ee2e5d69 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -30,7 +30,7 @@ jobs:
 
       - name: Install Poetry
         run: |
-          pipx install --pip-args "-c .github/workflows/constraints.txt" poetry
+          pipx install --pip-args "-c ${{ github.workspace }}/.github/workflows/constraints.txt" poetry
           poetry --version
 
       - name: Set up Python
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index ed8fbfd9..5d65bf2f 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -34,12 +34,12 @@ jobs:
 
       - name: Upgrade pip
         run: |
-          pip install -c .github/workflows/constraints.txt pip
+          pip install -c ${{ github.workspace }}/.github/workflows/constraints.txt pip
           pip --version
 
       - name: Install Poetry
         run: |
-          pip install -c .github/workflows/constraints.txt poetry
+          pip install -c ${{ github.workspace }}/.github/workflows/constraints.txt poetry
           poetry --version
 
       - name: Check if there is a parent commit
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 990cf901..f8a94be5 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -48,7 +48,7 @@ jobs:
 
       - name: Upgrade pip
         run: |
-          pip install -c .github/workflows/constraints.txt pip
+          pip install -c ${{ github.workspace }}/.github/workflows/constraints.txt pip
           pip --version
 
       - name: Upgrade pip in virtual environments
@@ -62,13 +62,13 @@ jobs:
 
       - name: Install Poetry
         run: |
-          pipx install --pip-args "-c .github/workflows/constraints.txt" poetry
+          pipx install --pip-args "-c ${{ github.workspace }}/.github/workflows/constraints.txt" poetry
           poetry --version
 
       - name: Install Nox
         run: |
-          pipx install --pip-args "-c .github/workflows/constraints.txt" nox
-          pipx inject --pip-args "-c .github/workflows/constraints.txt" nox nox-poetry
+          pipx install --pip-args "-c ${{ github.workspace }}/.github/workflows/constraints.txt" nox
+          pipx inject --pip-args "-c ${{ github.workspace }}/.github/workflows/constraints.txt" nox nox-poetry
           nox --version
 
       - name: Compute pre-commit cache key
@@ -131,18 +131,18 @@ jobs:
 
       - name: Upgrade pip
         run: |
-          pip install -c .github/workflows/constraints.txt pip
+          pip install -c ${{ github.workspace }}/.github/workflows/constraints.txt pip
           pip --version
 
       - name: Install Poetry
         run: |
-          pipx install --pip-args "-c .github/workflows/constraints.txt" poetry
+          pipx install --pip-args "-c ${{ github.workspace }}/.github/workflows/constraints.txt" poetry
           poetry --version
 
       - name: Install Nox
         run: |
-          pipx install --pip-args "-c .github/workflows/constraints.txt" nox
-          pipx inject --pip-args "-c .github/workflows/constraints.txt" nox nox-poetry
+          pipx install --pip-args "-c ${{ github.workspace }}/.github/workflows/constraints.txt" nox
+          pipx inject --pip-args "-c ${{ github.workspace }}/.github/workflows/constraints.txt" nox nox-poetry
           nox --version
 
       - name: Download coverage data
diff --git a/src/datadoc/backend/dapla_dataset_path_info.py b/src/datadoc/backend/dapla_dataset_path_info.py
index 71f565e4..b95ac463 100644
--- a/src/datadoc/backend/dapla_dataset_path_info.py
+++ b/src/datadoc/backend/dapla_dataset_path_info.py
@@ -450,3 +450,15 @@ def statistic_short_name(
             if i in dataset_path_parts and dataset_path_parts.index(i) != 0:
                 return dataset_path_parts[dataset_path_parts.index(i) - 1]
         return None
+
+    def path_complies_with_naming_standard(self) -> bool:
+        """Checks if path is valid according to SSB standard."""
+        if (
+            self.dataset_state
+            and self.statistic_short_name
+            and self.contains_data_from
+            and self.contains_data_until
+            and self.dataset_version
+        ):
+            return True
+        return False
diff --git a/tests/backend/test_dapla_dataset_path_info.py b/tests/backend/test_dapla_dataset_path_info.py
index 732ba619..17331f4a 100644
--- a/tests/backend/test_dapla_dataset_path_info.py
+++ b/tests/backend/test_dapla_dataset_path_info.py
@@ -279,3 +279,29 @@ def test_date_format_correct_end_date(date_format, period, expected):
     )
 def test_extract_shortname_in_path(data: str, expected: str):
     assert DaplaDatasetPathInfo(data).statistic_short_name == expected
+
+
+@pytest.mark.parametrize(
+    ("data"),
+    [
+        "gs://ssb-staging-dapla-felles-data-delt/person_data_p2022_v1.parquet",
+        "gs://ssb-staging-dapla-felles-data-delt/datadoc/person_data_v1.parquet",
+        "gs://ssb-staging-dapla-felles-data-delt/datadoc/person_data_p2021_v3.parquet",
+        "gs://ssb-staging-dapla-felles-data-delt/datadoc/utdata/person_data_v1.parquet",
+        "gs://ssb-staging-dapla-felles-data-delt/datadoc/utdata/person_data_p2021.parquet",
+    ],
+)
+def test_path_complies_with_naming_standard_invalid_input(data: str):
+    assert DaplaDatasetPathInfo(data).path_complies_with_naming_standard() is False
+
+
+@pytest.mark.parametrize(
+    ("data"),
+    [
+        "gs://ssb-staging-dapla-felles-data-delt/datadoc/utdata/person_data_p2021_v2.parquet",
+        "gs://ssb-staging-dapla-felles-data-delt/datadoc/utdata/person_data_p2021_p2022_v2.parquet",
+        "gs://ssb-staging-dapla-felles-data-delt/datadoc/utdata/undermappe/person_data_p2021_v2.parquet",
+    ],
+)
+def test_path_complies_with_naming_standard_valid_input(data: str):
+    assert DaplaDatasetPathInfo(data).path_complies_with_naming_standard() is True
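For reference, a minimal usage sketch of the new check, assuming the datadoc package is importable; the path is one of the inputs the new test marks as valid:

```python
from datadoc.backend.dapla_dataset_path_info import DaplaDatasetPathInfo

# Path taken from test_path_complies_with_naming_standard_valid_input: it has a
# dataset state folder ("utdata"), a period ("p2021") and a version ("v2"), so
# every attribute inspected by path_complies_with_naming_standard() is populated.
path = "gs://ssb-staging-dapla-felles-data-delt/datadoc/utdata/person_data_p2021_v2.parquet"

info = DaplaDatasetPathInfo(path)
print(info.path_complies_with_naming_standard())  # expected: True
```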