From 9a037365389e7a41531af1a55f98fd2ef9f9eabd Mon Sep 17 00:00:00 2001
From: Abbas Khan
Date: Thu, 12 Dec 2024 17:12:37 +0000
Subject: [PATCH 1/3] update tests and lambda decorator

---
 .../base-lambdas-reusable-deploy-all.yml      |  14 +++
 lambdas/enums/document_retention.py           |   6 +
 lambdas/enums/lambda_error.py                 |  12 ++
 lambdas/enums/s3_lifecycle_tags.py            |  12 --
 .../delete_document_object_handler.py         |  59 +++++++++
 .../delete_document_reference_handler.py      |   2 +-
 lambdas/services/document_deletion_service.py |  39 +++++-
 lambdas/services/document_service.py          |  45 ++++---
 lambdas/tests/unit/conftest.py                |   6 +
 .../test_delete_document_object_handler.py    | 118 ++++++++++++++++++
 .../test_delete_document_reference_handler.py |  16 +--
 .../test_document_reference_search_handler.py |   2 +-
 ...test_lloyd_george_record_stitch_handler.py |   4 -
 .../unit/helpers/data/dynamo/__init__.py      |   0
 .../data/{ => dynamo}/dynamo_responses.py     |   0
 .../data/{ => dynamo}/dynamo_scan_response.py |   0
 .../unit/helpers/data/dynamo/dynamo_stream.py |  50 ++++++++
 .../tests/unit/helpers/data/test_documents.py |   2 +-
 .../unit/models/test_document_reference.py    |   2 +-
 .../unit/services/base/test_dynamo_service.py |  10 +-
 .../test_bulk_upload_report_service.py        |   2 +-
 .../test_document_deletion_service.py         |  75 +++++++++--
 .../test_document_reference_search_service.py |   2 +-
 .../unit/services/test_document_service.py    |  79 ++++++++----
 lambdas/tests/unit/utils/test_dynamo_utils.py |  47 +++++++
 .../utils/decorators/override_error_check.py  |   4 +-
 .../validate_dynamo_stream_event.py           |  47 +++++++
 lambdas/utils/dynamo_utils.py                 |  16 +++
 lambdas/utils/exceptions.py                   |   4 +
 29 files changed, 575 insertions(+), 100 deletions(-)
 create mode 100644 lambdas/enums/document_retention.py
 delete mode 100644 lambdas/enums/s3_lifecycle_tags.py
 create mode 100644 lambdas/handlers/delete_document_object_handler.py
 create mode 100644 lambdas/tests/unit/handlers/test_delete_document_object_handler.py
 create mode 100644 lambdas/tests/unit/helpers/data/dynamo/__init__.py
 rename lambdas/tests/unit/helpers/data/{ => dynamo}/dynamo_responses.py (100%)
 rename lambdas/tests/unit/helpers/data/{ => dynamo}/dynamo_scan_response.py (100%)
 create mode 100644 lambdas/tests/unit/helpers/data/dynamo/dynamo_stream.py
 create mode 100644 lambdas/utils/decorators/validate_dynamo_stream_event.py

diff --git a/.github/workflows/base-lambdas-reusable-deploy-all.yml b/.github/workflows/base-lambdas-reusable-deploy-all.yml
index 195632f5b..3a62099e5 100644
--- a/.github/workflows/base-lambdas-reusable-deploy-all.yml
+++ b/.github/workflows/base-lambdas-reusable-deploy-all.yml
@@ -94,6 +94,20 @@ jobs:
     secrets:
       AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE }}

+  deploy_delete_document_object_handler:
+    name: Deploy delete_document_object_handler
+    uses: ./.github/workflows/base-lambdas-reusable-deploy.yml
+    with:
+      environment: ${{ inputs.environment}}
+      python_version: ${{ inputs.python_version }}
+      build_branch: ${{ inputs.build_branch}}
+      sandbox: ${{ inputs.sandbox }}
+      lambda_handler_name: delete_document_object_handler
+      lambda_aws_name: DeleteDocumentObjectS3
+      lambda_layer_names: 'core_lambda_layer'
+    secrets:
+      AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE }}
+
   deploy_document_manifest_job_lambda:
     name: Deploy document_manifest_job_lambda
     uses: ./.github/workflows/base-lambdas-reusable-deploy.yml
diff --git a/lambdas/enums/document_retention.py b/lambdas/enums/document_retention.py
new file mode 100644
index 000000000..349506d76
--- /dev/null
+++ b/lambdas/enums/document_retention.py
@@ -0,0 +1,6 @@
+from enum import IntEnum
+
+
+class DocumentRetentionDays(IntEnum):
+    SOFT_DELETE = 56
+    DEATH = 3650
diff --git a/lambdas/enums/lambda_error.py b/lambdas/enums/lambda_error.py
index 2f5e28b39..c4a046bec 100644
--- a/lambdas/enums/lambda_error.py
+++ b/lambdas/enums/lambda_error.py
@@ -232,6 +232,14 @@ def to_str(self) -> str:
     """
     Errors for DocumentDeletionServiceException
     """
+    DocDelInvalidStreamEvent = {
+        "err_code": "DDS_4001",
+        "message": "Failed to delete document object",
+    }
+    DocDelObjectFailure = {
+        "err_code": "DDS_4002",
+        "message": "Failed to delete document object",
+    }
     DocDelClient = {
         "err_code": "DDS_5001",
         "message": "Failed to delete documents",
@@ -470,6 +478,10 @@ def to_str(self) -> str:
         "err_code": "LGL_400",
         "message": "Incomplete record, Failed to create document manifest",
     }
+    DynamoInvalidStreamEvent = {
+        "err_code": "DBS_4001",
+        "message": "Failed to parse DynamoDb event stream",
+    }

 MockError = {
     "message": "Client error",
diff --git a/lambdas/enums/s3_lifecycle_tags.py b/lambdas/enums/s3_lifecycle_tags.py
deleted file mode 100644
index 3a4c83ca4..000000000
--- a/lambdas/enums/s3_lifecycle_tags.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from enum import Enum, IntEnum
-
-
-class S3LifecycleTags(Enum):
-    SOFT_DELETE = "soft-delete"
-    DEATH_DELETE = "patient-death"
-    ENABLE_TAG = "true"
-
-
-class S3LifecycleDays(IntEnum):
-    SOFT_DELETE = 56
-    DEATH_DELETE = 3650
diff --git a/lambdas/handlers/delete_document_object_handler.py b/lambdas/handlers/delete_document_object_handler.py
new file mode 100644
index 000000000..95c1f4e32
--- /dev/null
+++ b/lambdas/handlers/delete_document_object_handler.py
@@ -0,0 +1,59 @@
+from enums.lambda_error import LambdaError
+from enums.logging_app_interaction import LoggingAppInteraction
+from models.document_reference import DocumentReference
+from pydantic.v1 import ValidationError
+from services.document_deletion_service import DocumentDeletionService
+from utils.audit_logging_setup import LoggingService
+from utils.decorators.handle_lambda_exceptions import handle_lambda_exceptions
+from utils.decorators.override_error_check import override_error_check
+from utils.decorators.set_audit_arg import set_request_context_for_logging
+from utils.decorators.validate_dynamo_stream_event import validate_dynamo_stream
+from utils.dynamo_utils import parse_dynamo_record
+from utils.lambda_exceptions import DocumentDeletionServiceException
+from utils.lambda_response import ApiGatewayResponse
+from utils.request_context import request_context
+
+logger = LoggingService(__name__)
+
+
+@set_request_context_for_logging
+@override_error_check
+@handle_lambda_exceptions
+@validate_dynamo_stream
+def lambda_handler(event, context):
+    request_context.app_interaction = LoggingAppInteraction.DELETE_RECORD.value
+
+    logger.info(
+        "Delete Document Object handler has been triggered by DynamoDb REMOVE event"
+    )
+    try:
+        event_record = event["Records"][0]
+
+        event_type = event_record.get("eventName")
+        deleted_dynamo_reference = event_record.get("dynamodb").get("OldImage", {})
+
+        if event_type != "REMOVE" or not deleted_dynamo_reference:
+            logger.error(
+                "Failed to extract deleted record from DynamoDb stream",
+                {"Results": "Failed to delete document"},
+            )
+            raise DocumentDeletionServiceException(
+                400, LambdaError.DynamoInvalidStreamEvent
+            )
+        parsed_dynamo_record = parse_dynamo_record(deleted_dynamo_reference)
+        document = DocumentReference.model_validate(parsed_dynamo_record)
+
+        deletion_service = DocumentDeletionService()
+        deletion_service.handle_object_delete(deleted_reference=document)
+    except (ValueError, ValidationError) as e:
+        logger.error(
+            f"Failed to parse Document Reference from deleted record: {str(e)}",
+            {"Results": "Failed to delete document"},
+        )
+        raise DocumentDeletionServiceException(
+            400, LambdaError.DynamoInvalidStreamEvent
+        )
+
+    return ApiGatewayResponse(
+        200, "Successfully deleted Document Reference object", "GET"
+    ).create_api_gateway_response()
diff --git a/lambdas/handlers/delete_document_reference_handler.py b/lambdas/handlers/delete_document_reference_handler.py
index 2d2b26a31..d041b5ab6 100644
--- a/lambdas/handlers/delete_document_reference_handler.py
+++ b/lambdas/handlers/delete_document_reference_handler.py
@@ -43,7 +43,7 @@ def lambda_handler(event, context):

     deletion_service = DocumentDeletionService()

-    files_deleted = deletion_service.handle_delete(nhs_number, document_types)
+    files_deleted = deletion_service.handle_reference_delete(nhs_number, document_types)
     if files_deleted:
         logger.info(
             "Documents were deleted successfully", {"Result": "Successful deletion"}
diff --git a/lambdas/services/document_deletion_service.py b/lambdas/services/document_deletion_service.py
index 09ba07e2d..5671bfafe 100644
--- a/lambdas/services/document_deletion_service.py
+++ b/lambdas/services/document_deletion_service.py
@@ -1,11 +1,12 @@
 import os
 import uuid
 from typing import Literal
+from urllib.parse import urlparse

 from botocore.exceptions import ClientError
+from enums.document_retention import DocumentRetentionDays
 from enums.lambda_error import LambdaError
 from enums.nrl_sqs_upload import NrlActionTypes
-from enums.s3_lifecycle_tags import S3LifecycleTags
 from enums.snomed_codes import SnomedCodesCategory, SnomedCodesType
 from enums.supported_document_types import SupportedDocumentTypes
 from models.document_reference import DocumentReference
@@ -15,7 +16,7 @@
 from services.lloyd_george_stitch_job_service import LloydGeorgeStitchJobService
 from utils.audit_logging_setup import LoggingService
 from utils.common_query_filters import NotDeleted
-from utils.exceptions import DynamoServiceException
+from utils.exceptions import DocumentServiceException, DynamoServiceException
 from utils.lambda_exceptions import DocumentDeletionServiceException

 logger = LoggingService(__name__)
@@ -27,7 +28,7 @@ def __init__(self):
         self.stitch_service = LloydGeorgeStitchJobService()
         self.sqs_service = SQSService()

-    def handle_delete(
+    def handle_reference_delete(
         self, nhs_number: str, doc_types: list[SupportedDocumentTypes]
     ) -> list[DocumentReference]:
         files_deleted = []
@@ -38,6 +39,34 @@
         self.send_sqs_message_to_remove_pointer(nhs_number)
         return files_deleted

+    def handle_object_delete(self, deleted_reference: DocumentReference):
+        try:
+            s3_uri = deleted_reference.file_location
+
+            parsed_uri = urlparse(s3_uri)
+            bucket_name = parsed_uri.netloc
+            object_key = parsed_uri.path.lstrip("/")
+
+            if not bucket_name or not object_key:
+                raise DocumentDeletionServiceException(
+                    400, LambdaError.DocDelObjectFailure
+                )
+
+            self.document_service.delete_document_object(
+                bucket=bucket_name, key=object_key
+            )
+
+            logger.info(
+                "Successfully deleted Document Reference S3 Object",
+                {"Result": "Successful deletion"},
+            )
+        except DocumentServiceException as e:
+            logger.error(
+                str(e),
+                {"Results": "Failed to delete document"},
+            )
+            raise DocumentDeletionServiceException(400, LambdaError.DocDelObjectFailure)
+
     def get_documents_references_in_storage(
         self,
         nhs_number: str,
@@ -69,10 +98,10 @@ def delete_specific_doc_type(
         try:
             results = self.get_documents_references_in_storage(nhs_number, doc_type)
             if results:
-                self.document_service.delete_documents(
+                self.document_service.delete_document_references(
                     table_name=doc_type.get_dynamodb_table_name(),
                     document_references=results,
-                    type_of_delete=str(S3LifecycleTags.SOFT_DELETE.value),
+                    document_ttl_days=DocumentRetentionDays.SOFT_DELETE,
                 )

                 logger.info(
diff --git a/lambdas/services/document_service.py b/lambdas/services/document_service.py
index 5dfb3219a..7fbd1daaa 100644
--- a/lambdas/services/document_service.py
+++ b/lambdas/services/document_service.py
@@ -2,14 +2,17 @@

 from boto3.dynamodb.conditions import Attr, ConditionBase
 from enums.metadata_field_names import DocumentReferenceMetadataFields
-from enums.s3_lifecycle_tags import S3LifecycleDays, S3LifecycleTags
 from enums.supported_document_types import SupportedDocumentTypes
 from models.document_reference import DocumentReference
 from services.base.dynamo_service import DynamoDBService
 from services.base.s3_service import S3Service
 from utils.audit_logging_setup import LoggingService
 from utils.dynamo_utils import filter_uploaded_docs_and_recently_uploading_docs
-from utils.exceptions import FileUploadInProgress, NoAvailableDocument
+from utils.exceptions import (
+    DocumentServiceException,
+    FileUploadInProgress,
+    NoAvailableDocument,
+)

 logger = LoggingService(__name__)

@@ -67,22 +70,15 @@ def fetch_documents_from_table_with_filter(
                 documents.append(document)
         return documents

-    def delete_documents(
+    def delete_document_references(
         self,
         table_name: str,
         document_references: list[DocumentReference],
-        type_of_delete: str,
+        document_ttl_days: int,
     ):
         deletion_date = datetime.now(timezone.utc)

-        if type_of_delete == S3LifecycleTags.DEATH_DELETE.value:
-            ttl_days = S3LifecycleDays.DEATH_DELETE
-            tag_key = str(S3LifecycleTags.DEATH_DELETE.value)
-        else:
-            ttl_days = S3LifecycleDays.SOFT_DELETE
-            tag_key = str(S3LifecycleTags.SOFT_DELETE.value)
-
-        ttl_seconds = ttl_days * 24 * 60 * 60
+        ttl_seconds = document_ttl_days * 24 * 60 * 60
         document_reference_ttl = int(deletion_date.timestamp() + ttl_seconds)

         update_fields = {
@@ -95,17 +91,28 @@
         logger.info(f"Deleting items in table: {table_name}")

         for reference in document_references:
-            self.s3_service.create_object_tag(
-                file_key=reference.get_file_key(),
-                s3_bucket_name=reference.get_file_bucket(),
-                tag_key=tag_key,
-                tag_value=str(S3LifecycleTags.ENABLE_TAG.value),
-            )
-
             self.dynamo_service.update_item(
                 table_name, reference.id, updated_fields=update_fields
             )

+    def delete_document_object(self, bucket: str, key: str):
+        file_exists = self.s3_service.file_exist_on_s3(
+            s3_bucket_name=bucket, file_key=key
+        )
+
+        if not file_exists:
+            raise DocumentServiceException("Document does not exist in S3")
+
+        logger.info("Located file, attempting S3 object deletion")
+        self.s3_service.delete_object(s3_bucket_name=bucket, file_key=key)
+
+        file_exists = self.s3_service.file_exist_on_s3(
+            s3_bucket_name=bucket, file_key=key
+        )
+
+        if file_exists:
+            raise DocumentServiceException("Document located in S3 after deletion")
+
     def update_documents(
         self,
         table_name: str,
diff --git a/lambdas/tests/unit/conftest.py b/lambdas/tests/unit/conftest.py
index 0ab97fc6b..bf18b5b93 100644
--- a/lambdas/tests/unit/conftest.py
+++ b/lambdas/tests/unit/conftest.py
@@ -6,6 +6,7 @@
 from unittest import mock

 import pytest
+from botocore.exceptions import ClientError
 from models.document_reference import DocumentReference
 from models.pds_models import Patient, PatientDetails
 from pydantic import ValidationError
@@ -290,6 +291,11 @@ class MockError(Enum):
     }


+MOCK_CLIENT_ERROR = ClientError(
+    {"Error": {"Code": 500, "Message": "Test error message"}}, "Query"
+)
+
+
 @pytest.fixture
 def mock_temp_folder(mocker):
     temp_folder = tempfile.mkdtemp()
diff --git a/lambdas/tests/unit/handlers/test_delete_document_object_handler.py b/lambdas/tests/unit/handlers/test_delete_document_object_handler.py
new file mode 100644
index 000000000..a03014c7f
--- /dev/null
+++ b/lambdas/tests/unit/handlers/test_delete_document_object_handler.py
@@ -0,0 +1,118 @@
+import json
+
+import pytest
+from handlers.delete_document_object_handler import lambda_handler
+from services.document_deletion_service import DocumentDeletionService
+from tests.unit.conftest import MockError
+from tests.unit.helpers.data.dynamo.dynamo_stream import (
+    MOCK_DYNAMO_STREAM_EVENT,
+    MOCK_OLD_IMAGE_MODEL,
+)
+from utils.lambda_exceptions import DocumentDeletionServiceException
+from utils.lambda_response import ApiGatewayResponse
+
+
+@pytest.fixture
+def mock_handle_delete(mocker):
+    yield mocker.patch.object(DocumentDeletionService, "handle_object_delete")
+
+
+def test_lambda_handler_valid_dynamo_stream_event_successful_delete_returns_200(
+    set_env, context, mock_handle_delete
+):
+    expected = ApiGatewayResponse(
+        200, "Successfully deleted Document Reference object", "GET"
+    ).create_api_gateway_response()
+
+    actual = lambda_handler(MOCK_DYNAMO_STREAM_EVENT, context)
+
+    mock_handle_delete.assert_called_once_with(deleted_reference=MOCK_OLD_IMAGE_MODEL)
+    assert expected == actual
+
+
+@pytest.mark.parametrize(
+    "invalid_event",
+    [
+        {},
+        {"Records": []},
+        {"Records": [{"eventName": "INVALID", "dynamo": {}}]},
+        {"Records": [{"eventName": "REMOVE", "dynamo": {"OldImage": {}}}]},
+    ],
+)
+def test_lambda_handler_invalid_dynamo_stream_event_returns_400(
+    set_env, invalid_event, context, mock_handle_delete
+):
+    expected_body = json.dumps(
+        {
+            "message": "Failed to parse DynamoDb event stream",
+            "err_code": "DBS_4001",
+            "interaction_id": "88888888-4444-4444-4444-121212121212",
+        }
+    )
+    expected = ApiGatewayResponse(
+        400,
+        expected_body,
+        "GET",
+    ).create_api_gateway_response()
+
+    actual = lambda_handler(invalid_event, context)
+
+    assert expected == actual
+
+
+@pytest.mark.parametrize(
+    "invalid_event",
+    [
+        {
+            "Records": [
+                {"eventName": "REMOVE", "dynamo": {"OldImage": {"Invalid": "Invalid"}}}
+            ]
+        },
+        {
+            "Records": [
+                {
+                    "eventName": "REMOVE",
+                    "dynamo": {
+                        "OldImage": {
+                            "Deleted": {"S": "2024-12-05T11:58:53.527237Z"},
+                        }
+                    },
+                }
+            ]
+        },
+    ],
+)
+def test_lambda_handler_invalid_dynamo_stream_image_returns_400(
+    set_env, invalid_event, context, mock_handle_delete, caplog
+):
+    expected_body = json.dumps(
+        {
+            "message": "Failed to parse DynamoDb event stream",
+            "err_code": "DBS_4001",
+            "interaction_id": "88888888-4444-4444-4444-121212121212",
+        }
+    )
+    expected = ApiGatewayResponse(
+        400,
+        expected_body,
+        "GET",
+    ).create_api_gateway_response()
+
+    actual = lambda_handler(invalid_event, context)
+
+    assert expected == actual
+
+
+def test_lambda_handler_handles_lambda_exception(set_env, context, mock_handle_delete):
+    mock_error = DocumentDeletionServiceException(
+        status_code=400, error=MockError.Error
+    )
+    mock_handle_delete.side_effect = mock_error
+    expected_body = json.dumps(MockError.Error.value)
+    expected = ApiGatewayResponse(
+        400,
+        expected_body,
+        "GET",
+    ).create_api_gateway_response()
+    actual = lambda_handler(MOCK_DYNAMO_STREAM_EVENT, context)
+    assert expected == actual
diff --git a/lambdas/tests/unit/handlers/test_delete_document_reference_handler.py b/lambdas/tests/unit/handlers/test_delete_document_reference_handler.py
index 0c8d83a7b..0933842da 100644
--- a/lambdas/tests/unit/handlers/test_delete_document_reference_handler.py
+++ b/lambdas/tests/unit/handlers/test_delete_document_reference_handler.py
@@ -1,10 +1,10 @@
 import json
-from enum import Enum

 import pytest
 from botocore.exceptions import ClientError
 from handlers.delete_document_reference_handler import lambda_handler
 from services.document_deletion_service import DocumentDeletionService
+from tests.unit.conftest import MockError
 from tests.unit.helpers.data.test_documents import (
     create_test_doc_store_refs,
     create_test_lloyd_george_doc_store_refs,
@@ -16,12 +16,9 @@
 TEST_LG_DOC_STORE_REFERENCES = create_test_lloyd_george_doc_store_refs()


-class MockError(Enum):
-    Error = {
-        "message": "Client error",
-        "err_code": "AB_XXXX",
-        "interaction_id": "88888888-4444-4444-4444-121212121212",
-    }
+@pytest.fixture
+def mock_handle_delete(mocker):
+    yield mocker.patch.object(DocumentDeletionService, "handle_reference_delete")


 @pytest.mark.parametrize(
@@ -256,8 +253,3 @@ def test_lambda_handler_handle_lambda_exception(
     ).create_api_gateway_response()
     actual = lambda_handler(valid_id_and_lg_doctype_delete_event, context)
     assert expected == actual
-
-
-@pytest.fixture
-def mock_handle_delete(mocker):
-    yield mocker.patch.object(DocumentDeletionService, "handle_delete")
diff --git a/lambdas/tests/unit/handlers/test_document_reference_search_handler.py b/lambdas/tests/unit/handlers/test_document_reference_search_handler.py
index 6953d6551..167c2cbe1 100755
--- a/lambdas/tests/unit/handlers/test_document_reference_search_handler.py
+++ b/lambdas/tests/unit/handlers/test_document_reference_search_handler.py
@@ -3,7 +3,7 @@

 import pytest
 from handlers.document_reference_search_handler import lambda_handler
-from tests.unit.helpers.data.dynamo_responses import EXPECTED_RESPONSE
+from tests.unit.helpers.data.dynamo.dynamo_responses import EXPECTED_RESPONSE
 from utils.lambda_exceptions import DocumentRefSearchException
 from utils.lambda_response import ApiGatewayResponse

diff --git a/lambdas/tests/unit/handlers/test_lloyd_george_record_stitch_handler.py b/lambdas/tests/unit/handlers/test_lloyd_george_record_stitch_handler.py
index 8659d106e..0f69978f6 100755
--- a/lambdas/tests/unit/handlers/test_lloyd_george_record_stitch_handler.py
+++ b/lambdas/tests/unit/handlers/test_lloyd_george_record_stitch_handler.py
@@ -3,7 +3,6 @@
 import tempfile

 import pytest
-from botocore.exceptions import ClientError
 from enums.lambda_error import LambdaError
 from enums.trace_status import TraceStatus
 from handlers.lloyd_george_record_stitch_handler import (
@@ -20,9 +19,6 @@
 from utils.lambda_exceptions import LGStitchServiceException
 from utils.lambda_response import ApiGatewayResponse

-MOCK_CLIENT_ERROR = ClientError(
-    {"Error": {"Code": "500", "Message": "test error"}}, "testing"
-)
 MOCK_LG_DYNAMODB_RESPONSE_NO_RECORD = {"Items": [], "Count": 0}
 MOCK_LLOYD_GEORGE_DOCUMENT_REFS = create_test_lloyd_george_doc_store_refs()
 MOCK_STITCHED_FILE = "filename_of_stitched_lg_in_local_storage.pdf"
diff --git a/lambdas/tests/unit/helpers/data/dynamo/__init__.py b/lambdas/tests/unit/helpers/data/dynamo/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/lambdas/tests/unit/helpers/data/dynamo_responses.py b/lambdas/tests/unit/helpers/data/dynamo/dynamo_responses.py
similarity index 100%
rename from lambdas/tests/unit/helpers/data/dynamo_responses.py
rename to lambdas/tests/unit/helpers/data/dynamo/dynamo_responses.py
diff --git a/lambdas/tests/unit/helpers/data/dynamo_scan_response.py b/lambdas/tests/unit/helpers/data/dynamo/dynamo_scan_response.py
similarity index 100%
rename from lambdas/tests/unit/helpers/data/dynamo_scan_response.py
rename to lambdas/tests/unit/helpers/data/dynamo/dynamo_scan_response.py
diff --git a/lambdas/tests/unit/helpers/data/dynamo/dynamo_stream.py b/lambdas/tests/unit/helpers/data/dynamo/dynamo_stream.py
new file mode 100644
index 000000000..1f56677a0
--- /dev/null
+++ b/lambdas/tests/unit/helpers/data/dynamo/dynamo_stream.py
@@ -0,0 +1,50 @@
+from models.document_reference import DocumentReference
+from tests.unit.conftest import (
+    TEST_CURRENT_GP_ODS,
+    TEST_DOCUMENT_LOCATION,
+    TEST_FILE_KEY,
+    TEST_NHS_NUMBER,
+    TEST_UUID,
+)
+from utils.dynamo_utils import parse_dynamo_record
+
+MOCK_DYNAMO_STREAM_EVENT = {
+    "Records": [
+        {
+            "eventID": "36ddd0fac61a24a164c69ee01a145e65",
+            "eventName": "REMOVE",
+            "eventVersion": "1.1",
+            "eventSource": "aws:dynamodb",
+            "awsRegion": "eu-west-2",
+            "dynamodb": {
+                "ApproximateCreationDateTime": 1733485731.0,
+                "Keys": {"ID": {"S": f"{TEST_UUID}"}},
+                "OldImage": {
+                    "ContentType": {"S": "application/pdf"},
+                    "FileName": {"S": f"{TEST_FILE_KEY}"},
+                    "Uploading": {"BOOL": False},
+                    "TTL": {"N": "1738238333"},
+                    "Created": {"S": "2024-11-27T16:30:11.532530Z"},
+                    "Uploaded": {"BOOL": True},
+                    "FileLocation": {"S": f"{TEST_DOCUMENT_LOCATION}"},
+                    "CurrentGpOds": {"S": f"{TEST_CURRENT_GP_ODS}"},
+                    "VirusScannerResult": {"S": "Clean"},
+                    "Deleted": {"S": "2024-12-05T11:58:53.527237Z"},
+                    "ID": {"S": f"{TEST_UUID}"},
+                    "LastUpdated": {"N": "1732725252"},
+                    "NhsNumber": {"S": f"{TEST_NHS_NUMBER}"},
+                },
+                "SequenceNumber": "1044384700000000014787528151",
+                "SizeBytes": 474,
+                "StreamViewType": "OLD_IMAGE",
+            },
+            "eventSourceARN": "arn:aws:dynamodb:eu-west-2:5000000000:table/dynamo-table/stream/2024-12-06T12:00:00.000",
+        }
+    ]
+}
+
+MOCK_OLD_IMAGE_EVENT = MOCK_DYNAMO_STREAM_EVENT["Records"][0]["dynamodb"]["OldImage"]
+
+MOCK_OLD_IMAGE_MODEL = DocumentReference.model_validate(
+    parse_dynamo_record(MOCK_OLD_IMAGE_EVENT)
+)
diff --git a/lambdas/tests/unit/helpers/data/test_documents.py b/lambdas/tests/unit/helpers/data/test_documents.py
index 3b018edfb..429440a7a 100644
--- a/lambdas/tests/unit/helpers/data/test_documents.py
+++ b/lambdas/tests/unit/helpers/data/test_documents.py
@@ -10,7 +10,7 @@
     TEST_NHS_NUMBER,
     TEST_UUID,
 )
-from tests.unit.helpers.data.dynamo_responses import MOCK_SEARCH_RESPONSE
+from tests.unit.helpers.data.dynamo.dynamo_responses import MOCK_SEARCH_RESPONSE


 def create_test_doc_store_refs():
diff --git a/lambdas/tests/unit/models/test_document_reference.py b/lambdas/tests/unit/models/test_document_reference.py
index 416e4113a..6e7923061 100644
--- a/lambdas/tests/unit/models/test_document_reference.py
+++ b/lambdas/tests/unit/models/test_document_reference.py
@@ -3,7 +3,7 @@
 import pytest
 from freezegun import freeze_time
 from models.document_reference import DocumentReference
-from tests.unit.helpers.data.dynamo_responses import MOCK_SEARCH_RESPONSE
+from tests.unit.helpers.data.dynamo.dynamo_responses import MOCK_SEARCH_RESPONSE
 from utils.exceptions import InvalidDocumentReferenceException

 MOCK_DOCUMENT_REFERENCE = DocumentReference.model_validate(
diff --git a/lambdas/tests/unit/services/base/test_dynamo_service.py b/lambdas/tests/unit/services/base/test_dynamo_service.py
index 48f175f93..e27b13678 100755
--- a/lambdas/tests/unit/services/base/test_dynamo_service.py
+++ b/lambdas/tests/unit/services/base/test_dynamo_service.py
@@ -7,9 +7,9 @@
 from enums.dynamo_filter import AttributeOperator
 from enums.metadata_field_names import DocumentReferenceMetadataFields
 from services.base.dynamo_service import DynamoDBService
-from tests.unit.conftest import MOCK_TABLE_NAME, TEST_NHS_NUMBER
-from tests.unit.helpers.data.dynamo_responses import MOCK_SEARCH_RESPONSE
-from tests.unit.helpers.data.dynamo_scan_response import (
+from tests.unit.conftest import MOCK_CLIENT_ERROR, MOCK_TABLE_NAME, TEST_NHS_NUMBER
+from tests.unit.helpers.data.dynamo.dynamo_responses import MOCK_SEARCH_RESPONSE
+from tests.unit.helpers.data.dynamo.dynamo_scan_response import (
     EXPECTED_ITEMS_FOR_PAGINATED_RESULTS,
     MOCK_PAGINATED_RESPONSE_1,
     MOCK_PAGINATED_RESPONSE_2,
@@ -19,10 +19,6 @@
 from utils.dynamo_query_filter_builder import DynamoQueryFilterBuilder
 from utils.exceptions import DynamoServiceException

-MOCK_CLIENT_ERROR = ClientError(
-    {"Error": {"Code": 500, "Message": "Test error message"}}, "Query"
-)
-

 @pytest.fixture
 def mock_service(set_env, mocker):
diff --git a/lambdas/tests/unit/services/test_bulk_upload_report_service.py b/lambdas/tests/unit/services/test_bulk_upload_report_service.py
index 5a2b7fe5b..6f8febcc0 100644
--- a/lambdas/tests/unit/services/test_bulk_upload_report_service.py
+++ b/lambdas/tests/unit/services/test_bulk_upload_report_service.py
@@ -21,7 +21,7 @@
     TEST_UPLOADER_ODS_2,
 )
 from tests.unit.helpers.data.bulk_upload.test_data import readfile
-from tests.unit.helpers.data.dynamo_scan_response import (
+from tests.unit.helpers.data.dynamo.dynamo_scan_response import (
     MOCK_EMPTY_RESPONSE,
     UNEXPECTED_RESPONSE,
 )
diff --git a/lambdas/tests/unit/services/test_document_deletion_service.py b/lambdas/tests/unit/services/test_document_deletion_service.py
index e68c7e1a4..b9a07ffc7 100644
--- a/lambdas/tests/unit/services/test_document_deletion_service.py
+++ b/lambdas/tests/unit/services/test_document_deletion_service.py
@@ -1,19 +1,24 @@
 from unittest.mock import call

 import pytest
-from enums.s3_lifecycle_tags import S3LifecycleTags
+from enums.document_retention import DocumentRetentionDays
 from enums.supported_document_types import SupportedDocumentTypes
 from services.document_deletion_service import DocumentDeletionService
 from tests.unit.conftest import (
     MOCK_ARF_TABLE_NAME,
+    MOCK_BUCKET,
     MOCK_LG_TABLE_NAME,
     NRL_SQS_URL,
+    TEST_FILE_KEY,
     TEST_NHS_NUMBER,
 )
+from tests.unit.helpers.data.dynamo.dynamo_stream import MOCK_OLD_IMAGE_MODEL
 from tests.unit.helpers.data.test_documents import (
     create_test_doc_store_refs,
     create_test_lloyd_george_doc_store_refs,
 )
+from utils.exceptions import DocumentServiceException
+from utils.lambda_exceptions import DocumentDeletionServiceException

 TEST_DOC_STORE_REFERENCES = create_test_doc_store_refs()
 TEST_LG_DOC_STORE_REFERENCES = create_test_lloyd_george_doc_store_refs()
@@ -65,6 +70,14 @@ def mock_document_query(mocker):
     )


+@pytest.fixture
+def mock_delete_document_object(mocker, mock_deletion_service):
+    yield mocker.patch.object(
+        mock_deletion_service.document_service,
+        "delete_document_object",
+    )
+
+
 def test_handle_delete_for_all_doc_type(
     mock_delete_specific_doc_type, mock_deletion_service, mocker
 ):
@@ -73,7 +86,7 @@ def test_handle_delete_for_all_doc_type(
         mocker.MagicMock()
     )

-    actual = mock_deletion_service.handle_delete(
+    actual = mock_deletion_service.handle_reference_delete(
         TEST_NHS_NUMBER, [SupportedDocumentTypes.ARF, SupportedDocumentTypes.LG]
     )

@@ -97,7 +110,7 @@ def test_handle_delete_all_doc_type_when_only_lg_records_available(
     )
     expected = TEST_LG_DOC_STORE_REFERENCES

-    actual = mock_deletion_service.handle_delete(
+    actual = mock_deletion_service.handle_reference_delete(
         nhs_number, [SupportedDocumentTypes.LG, SupportedDocumentTypes.ARF]
     )

@@ -124,7 +137,7 @@ def test_handle_delete_for_one_doc_type(
         mocker.MagicMock()
     )

-    actual = mock_deletion_service.handle_delete(TEST_NHS_NUMBER, [doc_type])
+    actual = mock_deletion_service.handle_reference_delete(TEST_NHS_NUMBER, [doc_type])

     assert actual == expected

@@ -140,7 +153,7 @@ def test_handle_delete_when_no_record_for_patient_return_empty_list(
     )
     expected = []

-    actual = mock_deletion_service.handle_delete(
+    actual = mock_deletion_service.handle_reference_delete(
         TEST_NHS_NUMBER_WITH_NO_RECORD,
         [SupportedDocumentTypes.LG, SupportedDocumentTypes.ARF],
     )
@@ -163,18 +176,17 @@ def test_delete_specific_doc_type(
         "get_documents_references_in_storage",
         return_value=doc_ref,
    )
-    mock_deletion_service.document_service.delete_documents.return_value = []
-    type_of_delete = str(S3LifecycleTags.SOFT_DELETE.value)
+    mock_deletion_service.document_service.delete_document_references.return_value = []

     expected = doc_ref

     actual = mock_deletion_service.delete_specific_doc_type(TEST_NHS_NUMBER, doc_type)

     assert actual == expected
-    mock_deletion_service.document_service.delete_documents.assert_called_once_with(
+    mock_deletion_service.document_service.delete_document_references.assert_called_once_with(
         table_name=table_name,
         document_references=doc_ref,
-        type_of_delete=type_of_delete,
+        document_ttl_days=DocumentRetentionDays.SOFT_DELETE,
     )

@@ -189,17 +201,17 @@ def test_delete_specific_doc_type_when_no_record_for_given_patient(
     mocker.patch.object(
         mock_deletion_service, "get_documents_references_in_storage", return_value=[]
     )
-    mock_deletion_service.document_service.delete_documents.return_value = []
+    mock_deletion_service.document_service.delete_document_references.return_value = []

     actual = mock_deletion_service.delete_specific_doc_type(
         TEST_NHS_NUMBER_WITH_NO_RECORD, doc_type
     )

     assert actual == expected
-    mock_deletion_service.document_service.delete_documents.assert_not_called()
+    mock_deletion_service.document_service.delete_document_references.assert_not_called()


-def test_delete_documents_references_in_stitch_table(mocker, mock_deletion_service):
+def test_delete_documents_references_in_stitch_table(mock_deletion_service):
     mock_deletion_service.stitch_service.query_stitch_trace_with_nhs_number.return_value = (
         TEST_LG_DOC_STORE_REFERENCES
     )
@@ -243,3 +255,42 @@ def test_send_sqs_message_to_remove_pointer(mocker, mock_deletion_service):
         message_body=expected_message_body,
         queue_url=NRL_SQS_URL,
     )
+
+
+def test_handle_object_delete_successfully_deletes_s3_object(
+    mock_deletion_service, mock_delete_document_object, caplog
+):
+    test_reference = MOCK_OLD_IMAGE_MODEL
+
+    expected_log_message = "Successfully deleted Document Reference S3 Object"
+
+    mock_deletion_service.handle_object_delete(test_reference)
+
+    mock_delete_document_object.assert_called_once_with(
+        bucket=MOCK_BUCKET, key=TEST_FILE_KEY
+    )
+    assert expected_log_message in caplog.records[-1].msg
+
+
+def test_handle_object_delete_invalid_filepath_raises_exception(
+    mock_deletion_service, mock_delete_document_object
+):
+    test_reference = MOCK_OLD_IMAGE_MODEL
+    test_reference.file_location = ""
+
+    with pytest.raises(DocumentDeletionServiceException):
+        mock_deletion_service.handle_object_delete(test_reference)
+    mock_delete_document_object.assert_not_called()
+
+
+def test_handle_object_delete_DocumentService_exception_raises_exception(
+    mock_deletion_service, mock_delete_document_object, caplog
+):
+    test_reference = MOCK_OLD_IMAGE_MODEL
+    mock_delete_document_object.side_effect = DocumentServiceException()
+
+    with pytest.raises(DocumentDeletionServiceException):
+        mock_deletion_service.handle_object_delete(test_reference)
+    mock_delete_document_object.assert_called_once_with(
+        bucket=MOCK_BUCKET, key=TEST_FILE_KEY
+    )
diff --git a/lambdas/tests/unit/services/test_document_reference_search_service.py b/lambdas/tests/unit/services/test_document_reference_search_service.py
index c55396133..667fd3c9e 100644
--- a/lambdas/tests/unit/services/test_document_reference_search_service.py
+++ b/lambdas/tests/unit/services/test_document_reference_search_service.py
@@ -4,7 +4,7 @@
 from botocore.exceptions import ClientError
 from models.document_reference import DocumentReference
 from services.document_reference_search_service import DocumentReferenceSearchService
-from tests.unit.helpers.data.dynamo_responses import MOCK_SEARCH_RESPONSE
+from tests.unit.helpers.data.dynamo.dynamo_responses import MOCK_SEARCH_RESPONSE
 from utils.exceptions import DynamoServiceException
 from utils.lambda_exceptions import DocumentRefSearchException

diff --git a/lambdas/tests/unit/services/test_document_service.py b/lambdas/tests/unit/services/test_document_service.py
index 7b800eca2..3bd4d578d 100644
--- a/lambdas/tests/unit/services/test_document_service.py
+++ b/lambdas/tests/unit/services/test_document_service.py
@@ -2,9 +2,9 @@
 from unittest.mock import call

 import pytest
+from enums.document_retention import DocumentRetentionDays
 from enums.dynamo_filter import AttributeOperator
 from enums.metadata_field_names import DocumentReferenceMetadataFields
-from enums.s3_lifecycle_tags import S3LifecycleDays, S3LifecycleTags
 from enums.supported_document_types import SupportedDocumentTypes
 from freezegun import freeze_time
 from models.document_reference import DocumentReference
@@ -15,7 +15,7 @@
     MOCK_TABLE_NAME,
     TEST_NHS_NUMBER,
 )
-from tests.unit.helpers.data.dynamo_responses import (
+from tests.unit.helpers.data.dynamo.dynamo_responses import (
     MOCK_EMPTY_RESPONSE,
     MOCK_SEARCH_RESPONSE,
 )
@@ -23,6 +23,7 @@
     create_test_lloyd_george_doc_store_refs,
 )
 from utils.dynamo_query_filter_builder import DynamoQueryFilterBuilder
+from utils.exceptions import DocumentServiceException

 MOCK_DOCUMENT = MOCK_SEARCH_RESPONSE["Items"][0]

@@ -191,22 +192,15 @@ def test_delete_documents_soft_delete(
     test_doc_ref = DocumentReference.model_validate(MOCK_DOCUMENT)
     test_date = datetime.now()

-    ttl_date = test_date + timedelta(days=float(S3LifecycleDays.SOFT_DELETE))
+    ttl_date = test_date + timedelta(days=float(DocumentRetentionDays.SOFT_DELETE))
     test_update_fields = {
         "Deleted": datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
         "TTL": int(ttl_date.timestamp()),
     }

-    mock_service.delete_documents(
-        MOCK_TABLE_NAME, [test_doc_ref], str(S3LifecycleTags.SOFT_DELETE.value)
-    )
-
-    mock_s3_service.create_object_tag.assert_called_once_with(
-        file_key=test_doc_ref.get_file_key(),
-        s3_bucket_name=test_doc_ref.get_file_bucket(),
-        tag_key="soft-delete",
-        tag_value="true",
+    mock_service.delete_document_references(
+        MOCK_TABLE_NAME, [test_doc_ref], DocumentRetentionDays.SOFT_DELETE
     )

     mock_dynamo_service.update_item.assert_called_once_with(
@@ -221,22 +215,15 @@ def test_delete_documents_death_delete(
     test_doc_ref = DocumentReference.model_validate(MOCK_DOCUMENT)
     test_date = datetime.now()

-    ttl_date = test_date + timedelta(days=float(S3LifecycleDays.DEATH_DELETE))
+    ttl_date = test_date + timedelta(days=float(DocumentRetentionDays.DEATH))
     test_update_fields = {
         "Deleted": datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
         "TTL": int(ttl_date.timestamp()),
     }

-    mock_service.delete_documents(
-        MOCK_TABLE_NAME, [test_doc_ref], str(S3LifecycleTags.DEATH_DELETE.value)
-    )
-
-    mock_s3_service.create_object_tag.assert_called_once_with(
-        file_key=test_doc_ref.get_file_key(),
-        s3_bucket_name=test_doc_ref.get_file_bucket(),
-        tag_key="patient-death",
-        tag_value="true",
+    mock_service.delete_document_references(
+        MOCK_TABLE_NAME, [test_doc_ref], DocumentRetentionDays.DEATH
     )

     mock_dynamo_service.update_item.assert_called_once_with(
@@ -296,3 +283,51 @@ def test_check_existing_lloyd_george_records_return_true_if_upload_in_progress(
     response = mock_service.is_upload_in_process(mock_records_upload_in_process)

     assert response
+
+
+def test_delete_document_object_successfully_deletes_s3_object(mock_service, caplog):
+    test_bucket = "test-s3-bucket"
+    test_file_key = "9000000000/test-file.pdf"
+
+    expected_log_message = "Located file, attempting S3 object deletion"
+
+    mock_service.s3_service.file_exist_on_s3.side_effect = [
+        True,
+        False,
+    ]
+
+    mock_service.delete_document_object(bucket=test_bucket, key=test_file_key)
+
+    assert mock_service.s3_service.file_exist_on_s3.call_count == 2
+    mock_service.s3_service.file_exist_on_s3.assert_called_with(
+        s3_bucket_name=test_bucket, file_key=test_file_key
+    )
+    mock_service.s3_service.delete_object.assert_called_with(
+        s3_bucket_name=test_bucket, file_key=test_file_key
+    )
+
+    assert expected_log_message in caplog.records[-1].msg
+
+
+def test_delete_document_object_fails_to_delete_s3_object(mock_service, caplog):
+    test_bucket = "test-s3-bucket"
+    test_file_key = "9000000000/test-file.pdf"
+
+    expected_err_msg = "Document located in S3 after deletion" ""
+
+    mock_service.s3_service.file_exist_on_s3.side_effect = [
+        True,
+        True,
+    ]
+
+    with pytest.raises(DocumentServiceException) as e:
+        mock_service.delete_document_object(bucket=test_bucket, key=test_file_key)
+
+    assert mock_service.s3_service.file_exist_on_s3.call_count == 2
+    mock_service.s3_service.file_exist_on_s3.assert_called_with(
+        s3_bucket_name=test_bucket, file_key=test_file_key
+    )
+    mock_service.s3_service.delete_object.assert_called_with(
+        s3_bucket_name=test_bucket, file_key=test_file_key
+    )
+    assert expected_err_msg == str(e.value)
diff --git a/lambdas/tests/unit/utils/test_dynamo_utils.py b/lambdas/tests/unit/utils/test_dynamo_utils.py
index 4182e81aa..5f0525e28 100644
--- a/lambdas/tests/unit/utils/test_dynamo_utils.py
+++ b/lambdas/tests/unit/utils/test_dynamo_utils.py
@@ -1,10 +1,19 @@
+import json
+
+import pytest
 from enums.metadata_field_names import DocumentReferenceMetadataFields
+from tests.unit.helpers.data.dynamo.dynamo_stream import (
+    MOCK_OLD_IMAGE_EVENT,
+    MOCK_OLD_IMAGE_MODEL,
+)
+from unit.conftest import TEST_DOCUMENT_LOCATION, TEST_NHS_NUMBER, TEST_UUID
 from utils.dynamo_utils import (
     create_expression_attribute_placeholder,
     create_expression_attribute_values,
     create_expression_value_placeholder,
     create_expressions,
     create_update_expression,
+    parse_dynamo_record,
 )


@@ -104,3 +113,41 @@ def test_create_expression_attribute_placeholder_camel_case():
     actual = create_expression_attribute_placeholder(test_value)

     assert actual == expected
+
+
+def test_parse_dynamo_record_parses_correctly():
+    test_data = MOCK_OLD_IMAGE_EVENT
+
+    expected = {
+        "ContentType": MOCK_OLD_IMAGE_MODEL.content_type,
+        "FileName": MOCK_OLD_IMAGE_MODEL.file_name,
+        "Uploading": MOCK_OLD_IMAGE_MODEL.uploading,
+        "TTL": MOCK_OLD_IMAGE_MODEL.ttl,
+        "Created": MOCK_OLD_IMAGE_MODEL.created,
+        "Uploaded": MOCK_OLD_IMAGE_MODEL.uploaded,
+        "FileLocation": TEST_DOCUMENT_LOCATION,
+        "CurrentGpOds": MOCK_OLD_IMAGE_MODEL.current_gp_ods,
+        "VirusScannerResult": MOCK_OLD_IMAGE_MODEL.virus_scanner_result,
+        "Deleted": MOCK_OLD_IMAGE_MODEL.deleted,
+        "ID": TEST_UUID,
+        "LastUpdated": MOCK_OLD_IMAGE_MODEL.last_updated,
+        "NhsNumber": TEST_NHS_NUMBER,
+    }
+
+    actual = parse_dynamo_record(test_data)
+
+    assert actual == expected
+
+
+@pytest.mark.parametrize(
+    "test_json_string",
+    [
+        '{"Test": {"BOOL": "Not Bool"}}',
+        '{"Test": {"N": "Not Integer"}}',
+    ],
+)
+def test_parse_dynamo_record_raises_value_error(test_json_string):
+    test_object = json.loads(test_json_string)
+
+    with pytest.raises(ValueError):
+        parse_dynamo_record(test_object)
diff --git a/lambdas/utils/decorators/override_error_check.py b/lambdas/utils/decorators/override_error_check.py
index 422ad3eee..55cd8adbc 100644
--- a/lambdas/utils/decorators/override_error_check.py
+++ b/lambdas/utils/decorators/override_error_check.py
@@ -33,7 +33,9 @@ def interceptor(event, context):
         if error_override is None or error_override == "":
             return lambda_func(event, context)

-        response = check_manual_error_conditions(error_override, event["httpMethod"])
+        response = check_manual_error_conditions(
+            error_override, event.get("httpMethod", "GET")
+        )
         return response

diff --git a/lambdas/utils/decorators/validate_dynamo_stream_event.py b/lambdas/utils/decorators/validate_dynamo_stream_event.py
new file mode 100644
index 000000000..5eff2a6da
--- /dev/null
+++ b/lambdas/utils/decorators/validate_dynamo_stream_event.py
@@ -0,0 +1,47 @@
+from typing import Callable
+
+from enums.lambda_error import LambdaError
+from utils.audit_logging_setup import LoggingService
+from utils.lambda_response import ApiGatewayResponse
+
+logger = LoggingService(__name__)
+
+
+def validate_dynamo_stream(lambda_func: Callable):
+    """A decorator for lambda handler.
+    Verify that the incoming dynamo stream event contains a valid event type and usable image.
+    If not, returns a 400 Bad request response before actual lambda was triggered.
+
+    Usage:
+    @validate_dynamo_stream_event
+    def lambda_handler(event, context):
+        ...
+ """ + + def interceptor(event, context): + dynamo_records = event.get("Records") + + if not dynamo_records: + logger.error("Received an empty stream event from DynamoDb") + return ApiGatewayResponse( + 400, + LambdaError.DynamoInvalidStreamEvent.create_error_body(), + event.get("httpMethod", "GET"), + ).create_api_gateway_response() + + for record in dynamo_records: + dynamo_event = record.get("dynamodb", {}) + event_name = record.get("eventName") + if not dynamo_event or not event_name: + logger.error( + "Failed to extract dynamo event details from DynamoDb stream" + ) + return ApiGatewayResponse( + 400, + LambdaError.DynamoInvalidStreamEvent.create_error_body(), + event.get("httpMethod", "GET"), + ).create_api_gateway_response() + + return lambda_func(event, context) + + return interceptor diff --git a/lambdas/utils/dynamo_utils.py b/lambdas/utils/dynamo_utils.py index 9d77ede7f..784da272e 100644 --- a/lambdas/utils/dynamo_utils.py +++ b/lambdas/utils/dynamo_utils.py @@ -1,4 +1,5 @@ from datetime import datetime +from typing import Any, Dict import inflection from enums.dynamo_filter import AttributeOperator @@ -131,3 +132,18 @@ def filter_uploaded_docs_and_recently_uploading_docs(): return delete_filter_expression & ( uploaded_filter_expression | uploading_filter_expression ) + + +def parse_dynamo_record(dynamodb_record: Dict[str, Any]) -> Dict[str, Any]: + result = {} + for key, value in dynamodb_record.items(): + match value: + case {"S": str(s)}: + result[key] = s + case {"N": str(n)}: + result[key] = int(n) + case {"BOOL": bool(b)}: + result[key] = b + case _: + raise ValueError(f"Unsupported DynamoDB type for key {key}: {value}") + return result diff --git a/lambdas/utils/exceptions.py b/lambdas/utils/exceptions.py index 898c557ad..4bafff1e9 100644 --- a/lambdas/utils/exceptions.py +++ b/lambdas/utils/exceptions.py @@ -42,6 +42,10 @@ class DynamoServiceException(Exception): pass +class DocumentServiceException(Exception): + pass + + class MissingEnvVarException(Exception): pass From 51b9a332874e52e19086d9a2aed0d53aba0f8207 Mon Sep 17 00:00:00 2001 From: Abbas Khan Date: Thu, 12 Dec 2024 17:15:52 +0000 Subject: [PATCH 2/3] fix tests path --- lambdas/tests/unit/utils/test_dynamo_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lambdas/tests/unit/utils/test_dynamo_utils.py b/lambdas/tests/unit/utils/test_dynamo_utils.py index 5f0525e28..012ee15a4 100644 --- a/lambdas/tests/unit/utils/test_dynamo_utils.py +++ b/lambdas/tests/unit/utils/test_dynamo_utils.py @@ -2,11 +2,11 @@ import pytest from enums.metadata_field_names import DocumentReferenceMetadataFields +from tests.unit.conftest import TEST_DOCUMENT_LOCATION, TEST_NHS_NUMBER, TEST_UUID from tests.unit.helpers.data.dynamo.dynamo_stream import ( MOCK_OLD_IMAGE_EVENT, MOCK_OLD_IMAGE_MODEL, ) -from unit.conftest import TEST_DOCUMENT_LOCATION, TEST_NHS_NUMBER, TEST_UUID from utils.dynamo_utils import ( create_expression_attribute_placeholder, create_expression_attribute_values, From fd01048831f78a3b775e3ee4b297749476882fe9 Mon Sep 17 00:00:00 2001 From: Abbas Khan Date: Fri, 13 Dec 2024 14:54:57 +0000 Subject: [PATCH 3/3] add detail to log message --- lambdas/services/document_service.py | 4 ++- .../unit/services/test_document_service.py | 4 +-- lambdas/tests/unit/utils/test_dynamo_utils.py | 29 ++++++++++++------- 3 files changed, 23 insertions(+), 14 deletions(-) diff --git a/lambdas/services/document_service.py b/lambdas/services/document_service.py index 7fbd1daaa..910c1d90a 100644 --- 
a/lambdas/services/document_service.py +++ b/lambdas/services/document_service.py @@ -103,7 +103,9 @@ def delete_document_object(self, bucket: str, key: str): if not file_exists: raise DocumentServiceException("Document does not exist in S3") - logger.info("Located file, attempting S3 object deletion") + logger.info( + f"Located file `{key}` in `{bucket}`, attempting S3 object deletion" + ) self.s3_service.delete_object(s3_bucket_name=bucket, file_key=key) file_exists = self.s3_service.file_exist_on_s3( diff --git a/lambdas/tests/unit/services/test_document_service.py b/lambdas/tests/unit/services/test_document_service.py index 3bd4d578d..0939f2848 100644 --- a/lambdas/tests/unit/services/test_document_service.py +++ b/lambdas/tests/unit/services/test_document_service.py @@ -289,7 +289,7 @@ def test_delete_document_object_successfully_deletes_s3_object(mock_service, cap test_bucket = "test-s3-bucket" test_file_key = "9000000000/test-file.pdf" - expected_log_message = "Located file, attempting S3 object deletion" + expected_log_message = f"Located file `{test_file_key}` in `{test_bucket}`, attempting S3 object deletion" mock_service.s3_service.file_exist_on_s3.side_effect = [ True, @@ -313,7 +313,7 @@ def test_delete_document_object_fails_to_delete_s3_object(mock_service, caplog): test_bucket = "test-s3-bucket" test_file_key = "9000000000/test-file.pdf" - expected_err_msg = "Document located in S3 after deletion" "" + expected_err_msg = "Document located in S3 after deletion" mock_service.s3_service.file_exist_on_s3.side_effect = [ True, diff --git a/lambdas/tests/unit/utils/test_dynamo_utils.py b/lambdas/tests/unit/utils/test_dynamo_utils.py index 012ee15a4..17fba49c3 100644 --- a/lambdas/tests/unit/utils/test_dynamo_utils.py +++ b/lambdas/tests/unit/utils/test_dynamo_utils.py @@ -2,7 +2,13 @@ import pytest from enums.metadata_field_names import DocumentReferenceMetadataFields -from tests.unit.conftest import TEST_DOCUMENT_LOCATION, TEST_NHS_NUMBER, TEST_UUID +from tests.unit.conftest import ( + TEST_CURRENT_GP_ODS, + TEST_DOCUMENT_LOCATION, + TEST_FILE_KEY, + TEST_NHS_NUMBER, + TEST_UUID, +) from tests.unit.helpers.data.dynamo.dynamo_stream import ( MOCK_OLD_IMAGE_EVENT, MOCK_OLD_IMAGE_MODEL, @@ -117,20 +123,21 @@ def test_create_expression_attribute_placeholder_camel_case(): def test_parse_dynamo_record_parses_correctly(): test_data = MOCK_OLD_IMAGE_EVENT + test_image = MOCK_OLD_IMAGE_MODEL expected = { - "ContentType": MOCK_OLD_IMAGE_MODEL.content_type, - "FileName": MOCK_OLD_IMAGE_MODEL.file_name, - "Uploading": MOCK_OLD_IMAGE_MODEL.uploading, - "TTL": MOCK_OLD_IMAGE_MODEL.ttl, - "Created": MOCK_OLD_IMAGE_MODEL.created, - "Uploaded": MOCK_OLD_IMAGE_MODEL.uploaded, + "ContentType": test_image.content_type, + "FileName": TEST_FILE_KEY, + "Uploading": test_image.uploading, + "TTL": test_image.ttl, + "Created": test_image.created, + "Uploaded": test_image.uploaded, "FileLocation": TEST_DOCUMENT_LOCATION, - "CurrentGpOds": MOCK_OLD_IMAGE_MODEL.current_gp_ods, - "VirusScannerResult": MOCK_OLD_IMAGE_MODEL.virus_scanner_result, - "Deleted": MOCK_OLD_IMAGE_MODEL.deleted, + "CurrentGpOds": TEST_CURRENT_GP_ODS, + "VirusScannerResult": test_image.virus_scanner_result, + "Deleted": test_image.deleted, "ID": TEST_UUID, - "LastUpdated": MOCK_OLD_IMAGE_MODEL.last_updated, + "LastUpdated": test_image.last_updated, "NhsNumber": TEST_NHS_NUMBER, }