Correctly identifying updates and adding a unit test. (#156)
* Correctly identifying updates and adding a unit test.

* Adding another test that adds the iso versions in the opposite order.

---------

Co-authored-by: Mark <[email protected]>
THOR300 and Mark authored Jul 11, 2023
1 parent ac8058c commit 406f69a
Showing 2 changed files with 226 additions and 1 deletion.
2 changes: 1 addition & 1 deletion app/api/api_v1/routers/admin.py
@@ -136,7 +136,7 @@ async def update_document(
}
},
)
- if lang is not None and language not in existing_language_codes:
+ if lang is not None and lang.language_code not in existing_language_codes:
physical_document_language = PhysicalDocumentLanguage(
language_id=lang.id, document_id=physical_document.id
)
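The one-line fix above changes what is compared against `existing_language_codes`: presumably the raw payload value (`language`, e.g. "bo") was being checked before, so a language already stored under its three-letter code looked new and was linked again; the fix compares the resolved `Language` row's `language_code` instead. A minimal, self-contained sketch of that difference (the mapping and variable values here are illustrative, not the repository's code):

```python
# Illustrative only: a toy stand-in for the Language lookup used by update_document.
existing_language_codes = {"bod"}  # codes already linked to the physical document
language = "bo"                    # raw value from the update payload (ISO 639-1)
resolved_code = {"bo": "bod"}.get(language, language)  # code on the resolved Language row

# Old check: the raw payload string is not in the set, so the language would be linked again.
assert language not in existing_language_codes
# Fixed check: the resolved code is in the set, so the existing language is correctly skipped.
assert resolved_code in existing_language_codes
```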
225 changes: 225 additions & 0 deletions tests/routes/test_document_families.py
@@ -624,6 +624,231 @@ def test_update_document__works_on_new_iso_639_1_language(
assert lang.language_code in expected_languages


# TODO Parametrize this test with the two languages as parameters
@pytest.mark.parametrize(
"import_id",
[
"CCLW.executive.1.2",
"UNFCCC.non-party.2.2",
],
)
def test_update_document__works_on_existing_iso_639_1_language(
client: TestClient,
superuser_token_headers: dict[str, str],
test_db: Session,
mocker: Callable[..., Generator[MockerFixture, None, None]],
import_id: str,
):
"""Assert that we can skip over existing languages for documents when using two letter iso codes.
Send two payloads in series to assert that if we add a 639 two letter iso code where there is already a
language entry for that physical document we don't throw an error. This proves that we can detect that the
two-letter iso code language already exists.
"""
setup_with_multiple_docs(
test_db, mocker, doc_data=TWO_DFC_ROW_DIFFERENT_ORG, event_data=TWO_EVENT_ROWS
)

# ADD THE FIRST LANGUAGE
payload = {
"md5_sum": "c184214e-4870-48e0-adab-3e064b1b0e76",
"content_type": "updated/content_type",
"cdn_object": "folder/file",
"languages": ["bod"],
}

response = client.put(
f"/api/v1/admin/documents/{import_id}",
headers=superuser_token_headers,
json=payload,
)

assert response.status_code == 200
json_object = response.json()
assert json_object["md5_sum"] == "c184214e-4870-48e0-adab-3e064b1b0e76"
assert json_object["content_type"] == "updated/content_type"
assert json_object["cdn_object"] == "folder/file"
assert {language["language_code"] for language in json_object["languages"]} == {
"bod"
}

# Now Check the db
doc = (
test_db.query(FamilyDocument)
.filter(FamilyDocument.import_id == import_id)
.one()
.physical_document
)
assert doc.md5_sum == "c184214e-4870-48e0-adab-3e064b1b0e76"
assert doc.content_type == "updated/content_type"
assert doc.cdn_object == "folder/file"

languages = (
test_db.query(PhysicalDocumentLanguage)
.filter(PhysicalDocumentLanguage.document_id == doc.id)
.all()
)
assert len(languages) == 1
lang = test_db.query(Language).filter(Language.id == languages[0].language_id).one()
assert lang.language_code == "bod"

# NOW ADD THE SAME LANGUAGE AGAIN TO CHECK THAT THE UPDATE IS ADDITIVE AND WE SKIP OVER EXISTING LANGUAGES
payload = {
"md5_sum": "c184214e-4870-48e0-adab-3e064b1b0e76",
"content_type": "updated/content_type",
"cdn_object": "folder/file",
"languages": ["bo"],
}

response = client.put(
f"/api/v1/admin/documents/{import_id}",
headers=superuser_token_headers,
json=payload,
)

assert response.status_code == 200
json_object = response.json()
assert json_object["md5_sum"] == "c184214e-4870-48e0-adab-3e064b1b0e76"
assert json_object["content_type"] == "updated/content_type"
assert json_object["cdn_object"] == "folder/file"
expected_languages = {"bod"}
assert {
lang["language_code"] for lang in json_object["languages"]
} == expected_languages

# Now Check the db
doc = (
test_db.query(FamilyDocument)
.filter(FamilyDocument.import_id == import_id)
.one()
.physical_document
)
assert doc.md5_sum == "c184214e-4870-48e0-adab-3e064b1b0e76"
assert doc.content_type == "updated/content_type"
assert doc.cdn_object == "folder/file"

doc_languages = (
test_db.query(PhysicalDocumentLanguage)
.filter(PhysicalDocumentLanguage.document_id == doc.id)
.all()
)
assert len(doc_languages) == 1
for doc_lang in doc_languages:
lang = test_db.query(Language).filter(Language.id == doc_lang.language_id).one()
assert lang.language_code in expected_languages
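Both tests hinge on "bo" (ISO 639-1) and "bod" (ISO 639-3) identifying the same language, Tibetan, so the update endpoint must resolve either form to the same Language row and the stored code stays "bod". A quick way to check that equivalence outside the API, using the pycountry package purely as an illustration (the diff does not show which lookup the API itself uses):

```python
# Not necessarily how this API resolves codes; pycountry is just one way to show
# that "bo" and "bod" refer to the same language (Tibetan).
import pycountry

tibetan = pycountry.languages.get(alpha_2="bo")
assert tibetan.alpha_3 == "bod"
assert pycountry.languages.get(alpha_3="bod").alpha_2 == "bo"
```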


@pytest.mark.parametrize(
"import_id",
[
"CCLW.executive.1.2",
"UNFCCC.non-party.2.2",
],
)
def test_update_document__works_on_existing_iso_639_3_language(
client: TestClient,
superuser_token_headers: dict[str, str],
test_db: Session,
mocker: Callable[..., Generator[MockerFixture, None, None]],
import_id: str,
):
"""Assert that we can skip over existing languages for documents when using three letter iso codes.
Send two payloads in series to assert that if we add a 639 three letter iso code where there is already a
language entry for that physical document we don't throw an error. This proves that we can detect that the
three-letter iso code language already exists.
"""
setup_with_multiple_docs(
test_db, mocker, doc_data=TWO_DFC_ROW_DIFFERENT_ORG, event_data=TWO_EVENT_ROWS
)

# ADD THE FIRST LANGUAGE
payload = {
"md5_sum": "c184214e-4870-48e0-adab-3e064b1b0e76",
"content_type": "updated/content_type",
"cdn_object": "folder/file",
"languages": ["bo"],
}

response = client.put(
f"/api/v1/admin/documents/{import_id}",
headers=superuser_token_headers,
json=payload,
)

assert response.status_code == 200
json_object = response.json()
assert json_object["md5_sum"] == "c184214e-4870-48e0-adab-3e064b1b0e76"
assert json_object["content_type"] == "updated/content_type"
assert json_object["cdn_object"] == "folder/file"
assert {language["language_code"] for language in json_object["languages"]} == {
"bod"
}

# Now Check the db
doc = (
test_db.query(FamilyDocument)
.filter(FamilyDocument.import_id == import_id)
.one()
.physical_document
)
assert doc.md5_sum == "c184214e-4870-48e0-adab-3e064b1b0e76"
assert doc.content_type == "updated/content_type"
assert doc.cdn_object == "folder/file"

languages = (
test_db.query(PhysicalDocumentLanguage)
.filter(PhysicalDocumentLanguage.document_id == doc.id)
.all()
)
assert len(languages) == 1
lang = test_db.query(Language).filter(Language.id == languages[0].language_id).one()
assert lang.language_code == "bod"

# NOW ADD THE SAME LANGUAGE AGAIN TO CHECK THAT THE UPDATE IS ADDITIVE AND WE SKIP OVER EXISTING LANGUAGES
payload = {
"md5_sum": "c184214e-4870-48e0-adab-3e064b1b0e76",
"content_type": "updated/content_type",
"cdn_object": "folder/file",
"languages": ["bod"],
}

response = client.put(
f"/api/v1/admin/documents/{import_id}",
headers=superuser_token_headers,
json=payload,
)

assert response.status_code == 200
json_object = response.json()
assert json_object["md5_sum"] == "c184214e-4870-48e0-adab-3e064b1b0e76"
assert json_object["content_type"] == "updated/content_type"
assert json_object["cdn_object"] == "folder/file"
expected_languages = {"bod"}
assert {
lang["language_code"] for lang in json_object["languages"]
} == expected_languages

# Now Check the db
doc = (
test_db.query(FamilyDocument)
.filter(FamilyDocument.import_id == import_id)
.one()
.physical_document
)
assert doc.md5_sum == "c184214e-4870-48e0-adab-3e064b1b0e76"
assert doc.content_type == "updated/content_type"
assert doc.cdn_object == "folder/file"

doc_languages = (
test_db.query(PhysicalDocumentLanguage)
.filter(PhysicalDocumentLanguage.document_id == doc.id)
.all()
)
assert len(doc_languages) == 1
for doc_lang in doc_languages:
lang = test_db.query(Language).filter(Language.id == doc_lang.language_id).one()
assert lang.language_code in expected_languages
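The TODO above the first test suggests folding these two near-identical tests into one by parametrizing over the language codes. One possible shape for that refactor, sketched here rather than taken from the commit (the test name and the second parametrize axis are hypothetical):

```python
# Sketch only; not part of this commit. Stacked parametrize decorators produce the
# cross product of import IDs and code orders, covering "bod then bo" and "bo then bod".
@pytest.mark.parametrize("import_id", ["CCLW.executive.1.2", "UNFCCC.non-party.2.2"])
@pytest.mark.parametrize("first_code, second_code", [("bod", "bo"), ("bo", "bod")])
def test_update_document__skips_existing_language(
    client, superuser_token_headers, test_db, mocker, import_id, first_code, second_code
):
    setup_with_multiple_docs(
        test_db, mocker, doc_data=TWO_DFC_ROW_DIFFERENT_ORG, event_data=TWO_EVENT_ROWS
    )
    for code in (first_code, second_code):
        response = client.put(
            f"/api/v1/admin/documents/{import_id}",
            headers=superuser_token_headers,
            json={
                "md5_sum": "c184214e-4870-48e0-adab-3e064b1b0e76",
                "content_type": "updated/content_type",
                "cdn_object": "folder/file",
                "languages": [code],
            },
        )
        assert response.status_code == 200

    # Regardless of the order the codes arrive in, exactly one language link
    # exists for the physical document and it is stored under "bod".
    doc = (
        test_db.query(FamilyDocument)
        .filter(FamilyDocument.import_id == import_id)
        .one()
        .physical_document
    )
    links = (
        test_db.query(PhysicalDocumentLanguage)
        .filter(PhysicalDocumentLanguage.document_id == doc.id)
        .all()
    )
    assert len(links) == 1
    lang = test_db.query(Language).filter(Language.id == links[0].language_id).one()
    assert lang.language_code == "bod"
```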


@pytest.mark.parametrize(
"import_id",
[
