Skip to content

Commit

Permalink
Support document_status updates in the backend (#212)
Browse files Browse the repository at this point in the history
* Support `document_status` updates in the backend

So that we can use this endpoint during ingests to update statuses, rather
than interacting with the database directly

* Add check that deleted row does not change

* Move publisher to its own endpoint

This adds an extensible function that is called once a document has
finished running through the pipeline. For now it simply updates the
document_status to mark the document as published

* Revert having document status on update response
  • Loading branch information
olaughter authored Jan 18, 2024
1 parent 318d4ad commit db7ac85
Show file tree
Hide file tree
Showing 5 changed files with 167 additions and 17 deletions.
65 changes: 49 additions & 16 deletions app/api/api_v1/routers/admin.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import logging
from typing import cast

from fastapi import (
APIRouter,
Expand All @@ -8,26 +9,70 @@
status,
)
from sqlalchemy import update
from sqlalchemy import Column

from app.db.models.law_policy import DocumentStatus
from app.api.api_v1.schemas.document import (
DocumentUpdateRequest,
)
from app.core.auth import get_superuser_details
from app.core.validation import IMPORT_ID_MATCHER
from app.core.lookups import get_family_document_by_import_id_or_slug
from app.db.models.document.physical_document import (
LanguageSource,
PhysicalDocument,
Language,
PhysicalDocumentLanguage,
)
from app.db.models.law_policy.family import FamilyDocument, Slug
from app.db.session import get_db

_LOGGER = logging.getLogger(__name__)

admin_document_router = r = APIRouter()


@r.post("/documents/{import_id_or_slug}/processed", status_code=status.HTTP_200_OK)
async def update_document_status(
    request: Request,
    import_id_or_slug: str,
    db=Depends(get_db),
    current_user=Depends(get_superuser_details),
):
    """
    Mark a family document as fully processed by the ingest pipeline.

    Superuser-only endpoint. If the document is currently in the CREATED
    state it is transitioned to PUBLISHED; documents in any other state
    (e.g. DELETED) are intentionally left unchanged.

    :param request: incoming request (unused; kept for signature
        consistency with the other admin endpoints).
    :param import_id_or_slug: a CPR import id or a document slug.
    :param db: database session (injected).
    :param current_user: authenticated superuser (injected).
    :raises HTTPException: 422 when no matching family document exists.
    :return: the document's import_id and its (possibly updated) status.
    """
    _LOGGER.info(
        f"Superuser '{current_user.email}' called update_document_status",
        extra={
            "props": {
                "superuser_email": current_user.email,
                "import_id_or_slug": import_id_or_slug,
            }
        },
    )

    family_document = get_family_document_by_import_id_or_slug(db, import_id_or_slug)
    if family_document is None:
        # Include the failing identifier so clients can diagnose a 422
        # without consulting server logs.
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=f"Could not find document: {import_id_or_slug}",
        )

    # Only freshly-created documents are published; this keeps the call
    # idempotent and ensures e.g. DELETED rows are never resurrected.
    if family_document.document_status == DocumentStatus.CREATED:
        # cast() only placates the type checker for the Column-typed attribute.
        family_document.document_status = cast(Column, DocumentStatus.PUBLISHED)
        _LOGGER.info(
            "Publishing family document",
            extra={
                "props": {
                    "superuser_email": current_user.email,
                    "import_id_or_slug": import_id_or_slug,
                    "result": family_document.document_status,
                }
            },
        )

    db.commit()
    return {
        "import_id": family_document.import_id,
        "document_status": family_document.document_status,
    }


@r.put("/documents/{import_id_or_slug}", status_code=status.HTTP_200_OK)
async def update_document(
request: Request,
Expand All @@ -50,21 +95,9 @@ async def update_document(
)

# First query the FamilyDocument
query = db.query(FamilyDocument)
if IMPORT_ID_MATCHER.match(import_id_or_slug) is not None:
family_document = query.filter(
FamilyDocument.import_id == import_id_or_slug
).one_or_none()
_LOGGER.info("update_document called with import_id")
else:
family_document = (
query.join(Slug, Slug.family_document_import_id == FamilyDocument.import_id)
.filter(Slug.name == import_id_or_slug)
.one_or_none()
)
_LOGGER.info("update_document called with slug")

family_document = get_family_document_by_import_id_or_slug(db, import_id_or_slug)
# Check we have found one

if family_document is None:
raise HTTPException(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
Expand Down
2 changes: 1 addition & 1 deletion app/api/api_v1/schemas/document.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@
from typing import Any, Mapping, Optional, Sequence, Union

from pydantic import field_validator, ConfigDict, BaseModel
from . import CLIMATE_LAWS_MATCH

from . import CLIMATE_LAWS_MATCH

Json = dict[str, Any]

Expand Down
21 changes: 21 additions & 0 deletions app/core/lookups.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from app.core.organisation import get_organisation_taxonomy_by_name

from app.core.util import tree_table_to_json
from app.core.validation import IMPORT_ID_MATCHER
from app.db.models.app.users import Organisation
from app.db.models.document.physical_document import Language
from app.db.models.law_policy import (
Expand All @@ -14,6 +15,8 @@
FamilyDocumentType,
Variant,
)
from app.db.models.law_policy.family import FamilyDocument, Slug


import logging

Expand Down Expand Up @@ -117,3 +120,21 @@ def is_country_code(db: Session, country_code: str) -> bool:
return False

return bool(country_code is not None)


def get_family_document_by_import_id_or_slug(
    db: Session, import_id_or_slug: str
) -> Optional[FamilyDocument]:
    """Look up a FamilyDocument by import id, falling back to slug.

    The argument is treated as an import id when it matches the import-id
    pattern; otherwise it is resolved through the Slug table.

    :param db: database session to query.
    :param import_id_or_slug: CPR import id or document slug.
    :return: the matching FamilyDocument, or None when nothing matches.
    """
    base_query = db.query(FamilyDocument)
    if IMPORT_ID_MATCHER.match(import_id_or_slug) is None:
        # Not shaped like an import id — resolve it as a slug instead.
        return (
            base_query.join(
                Slug, Slug.family_document_import_id == FamilyDocument.import_id
            )
            .filter(Slug.name == import_id_or_slug)
            .one_or_none()
        )
    return base_query.filter(
        FamilyDocument.import_id == import_id_or_slug
    ).one_or_none()
5 changes: 5 additions & 0 deletions tests/routes/document_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,11 @@
)


# Bulk-import CSV fixture: two family documents, neither published —
# row one ends in status CREATED (eligible for publishing) and row two
# in status DELETED.
TWO_UNPUBLISHED_DFC_ROW = """ID,Document ID,CCLW Description,Part of collection?,Create new family/ies?,Collection ID,Collection name,Collection summary,Document title,Family name,Family summary,Family ID,Document role,Applies to ID,Geography ISO,Documents,Category,Events,Sectors,Instruments,Frameworks,Responses,Natural Hazards,Document Type,Year,Language,Keywords,Geography,Parent Legislation,Comment,CPR Document ID,CPR Family ID,CPR Collection ID,CPR Family Slug,CPR Document Slug,Document variant,CPR Document Status
1001,0,Test1,FALSE,FALSE,N/A,Collection1,CollectionSummary1,Title1,Fam1,Summary1,,MAIN,,GBR,http://somewhere|en,executive,02/02/2014|Law passed,Energy,,,Mitigation,,Order,,,Energy Supply,Algeria,,,CCLW.executive.1.2,CCLW.family.1001.0,CPR.Collection.1,FamSlug1,DocSlug1,Translation,CREATED
2002,0,Test2,FALSE,FALSE,N/A,Collection1,CollectionSummary1,Title2,Fam2,Summary2,,MAIN,,GBR,http://another_somewhere|en,executive,03/03/2024|Law passed,Energy,,,Mitigation,,Order,,,Energy Supply,Algeria,,,CCLW.executive.2.2,CCLW.family.2002.0,CPR.Collection.1,FamSlug2,DocSlug2,,DELETED
"""

# Bulk-import CSV fixture: a single family document whose row ends in
# status PUBLISHED.
ONE_DFC_ROW = """ID,Document ID,CCLW Description,Part of collection?,Create new family/ies?,Collection ID,Collection name,Collection summary,Document title,Family name,Family summary,Family ID,Document role,Applies to ID,Geography ISO,Documents,Category,Events,Sectors,Instruments,Frameworks,Responses,Natural Hazards,Document Type,Year,Language,Keywords,Geography,Parent Legislation,Comment,CPR Document ID,CPR Family ID,CPR Collection ID,CPR Family Slug,CPR Document Slug,Document variant,CPR Document Status
1001,0,Test1,FALSE,FALSE,N/A,Collection1,CollectionSummary1,Title1,Fam1,Summary1,,MAIN,,GBR,http://somewhere|en,executive,02/02/2014|Law passed,Energy,,,Mitigation,,Order,,,Energy Supply,Algeria,,,CCLW.executive.1.2,CCLW.family.1001.0,CPR.Collection.1,FamSlug1,DocSlug1,Translation,PUBLISHED
"""
Expand Down
91 changes: 91 additions & 0 deletions tests/routes/test_document_families.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
from tests.routes.document_helpers import (
ONE_DFC_ROW_TWO_LANGUAGES,
ONE_EVENT_ROW,
TWO_UNPUBLISHED_DFC_ROW,
TWO_DFC_ROW_DIFFERENT_ORG,
TWO_DFC_ROW_ONE_LANGUAGE,
TWO_DFC_ROW_NON_MATCHING_IDS,
Expand Down Expand Up @@ -336,6 +337,96 @@ def test_physical_doc_multiple_languages(
assert set(document["languages"]) == set(["fra", "eng"])


def test_update_document_status__is_secure(
    client: TestClient,
    test_db: Session,
    mocker: Callable[..., Generator[MockerFixture, None, None]],
):
    """An unauthenticated call to the processed endpoint is rejected with 401."""
    setup_with_two_docs(test_db, mocker)

    # No auth headers are sent, so the superuser dependency must reject us.
    unauthenticated_response = client.post(
        "/api/v1/admin/documents/CCLW.executive.1.2/processed"
    )
    assert unauthenticated_response.status_code == 401


@pytest.mark.parametrize(
    "import_id",
    [
        "CCLW.executive.12",
        "UNFCCC.s.ill.y.2.2",
    ],
)
def test_update_document_status__fails_on_non_matching_import_id(
    client: TestClient,
    superuser_token_headers: dict[str, str],
    test_db: Session,
    mocker: Callable[..., Generator[MockerFixture, None, None]],
    import_id: str,
):
    """Identifiers that match no document produce a 422 from the endpoint."""
    setup_with_multiple_docs(
        test_db,
        mocker,
        doc_data=TWO_DFC_ROW_NON_MATCHING_IDS,
        event_data=TWO_EVENT_ROWS,
    )

    endpoint = f"/api/v1/admin/documents/{import_id}/processed"
    response = client.post(endpoint, headers=superuser_token_headers)

    assert response.status_code == 422


def test_update_document_status__publishes_document(
    client: TestClient,
    superuser_token_headers: dict[str, str],
    test_db: Session,
    mocker: Callable[..., Generator[MockerFixture, None, None]],
):
    """Test that we can send a payload to the backend to update family_document.document_status"""
    setup_with_multiple_docs(
        test_db, mocker, doc_data=TWO_UNPUBLISHED_DFC_ROW, event_data=TWO_EVENT_ROWS
    )
    update_import_id = "CCLW.executive.1.2"
    unchanged_import_id = "CCLW.executive.2.2"

    def _status_of(doc_import_id: str):
        # Read the current document_status straight from the database.
        return (
            test_db.query(FamilyDocument)
            .filter(FamilyDocument.import_id == doc_import_id)
            .one()
            .document_status
        )

    # Capture the state of the db beforehand.
    pre_family_status = _status_of(update_import_id)

    response = client.post(
        f"/api/v1/admin/documents/{update_import_id}/processed",
        headers=superuser_token_headers,
    )

    assert response.status_code == 200
    json_object = response.json()
    assert json_object["import_id"] == update_import_id
    assert json_object["document_status"] == "Published"

    # The database row must reflect the transition...
    post_family_status = _status_of(update_import_id)
    assert post_family_status == "Published"
    assert post_family_status != pre_family_status

    # ...while the deleted sibling row remains untouched.
    assert _status_of(unchanged_import_id) == "Deleted"


def test_update_document__is_secure(
client: TestClient,
test_db: Session,
Expand Down

0 comments on commit db7ac85

Please sign in to comment.