From c5aa314d5f695d82b6b57fb1a41c6109a6282c77 Mon Sep 17 00:00:00 2001
From: matusdrobuliak66 <60785969+matusdrobuliak66@users.noreply.github.com>
Date: Mon, 8 Apr 2024 12:15:14 +0200
Subject: [PATCH] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20reusing=20the=20same=20S3?=
 =?UTF-8?q?=20client=20(#5289)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../aws-library/src/aws_library/s3/client.py  | 43 +++++++-------
 .../aws-library/src/aws_library/s3/errors.py  | 57 +++++++++++++++++++
 packages/aws-library/tests/test_s3_client.py  |  2 +-
 .../services/resource_tracker_service_runs.py |  2 +-
 ...i_resource_tracker_service_runs__export.py |  2 +-
 services/storage/requirements/_base.in        |  1 +
 services/storage/requirements/_base.txt       |  1 +
 services/storage/requirements/ci.txt          |  1 +
 services/storage/requirements/dev.txt         |  1 +
 services/storage/requirements/prod.txt        |  1 +
 .../src/simcore_service_storage/exceptions.py | 15 -----
 .../handlers_health.py                        |  2 +-
 .../storage/src/simcore_service_storage/s3.py | 37 ++++++------
 .../src/simcore_service_storage/s3_client.py  | 50 ++++------------
 .../src/simcore_service_storage/s3_utils.py   | 53 ++---------------
 .../simcore_service_storage/simcore_s3_dsm.py |  2 +-
 .../simcore_service_storage/utils_handlers.py |  3 +-
 .../storage/tests/unit/test_handlers_files.py |  2 +-
 .../test_handlers_simcore_s3_benchmark.py     | 25 ++++----
 services/storage/tests/unit/test_s3_client.py | 57 +++++++++----------
 .../storage/tests/unit/test_utils_handlers.py |  2 +-
 21 files changed, 166 insertions(+), 193 deletions(-)

diff --git a/packages/aws-library/src/aws_library/s3/client.py b/packages/aws-library/src/aws_library/s3/client.py
index 7da6e1ad32f..a74658784ee 100644
--- a/packages/aws-library/src/aws_library/s3/client.py
+++ b/packages/aws-library/src/aws_library/s3/client.py
@@ -4,7 +4,6 @@
 from typing import cast
 
 import aioboto3
-import botocore.exceptions
 from aiobotocore.session import ClientCreatorContext
 from botocore.client import Config
 from models_library.api_schemas_storage import S3BucketName
@@ -12,19 +11,24 @@
 from settings_library.s3 import S3Settings
 from types_aiobotocore_s3 import S3Client
 
-from .errors import S3RuntimeError
+from .errors import s3_exception_handler
 
 _logger = logging.getLogger(__name__)
 
+_S3_MAX_CONCURRENCY_DEFAULT = 10
+
 
 @dataclass(frozen=True)
 class SimcoreS3API:
     client: S3Client
     session: aioboto3.Session
     exit_stack: contextlib.AsyncExitStack
+    transfer_max_concurrency: int
 
     @classmethod
-    async def create(cls, settings: S3Settings) -> "SimcoreS3API":
+    async def create(
+        cls, settings: S3Settings, s3_max_concurrency: int = _S3_MAX_CONCURRENCY_DEFAULT
+    ) -> "SimcoreS3API":
         session = aioboto3.Session()
         session_client = session.client(
             "s3",
@@ -37,10 +41,11 @@ async def create(cls, settings: S3Settings) -> "SimcoreS3API":
         )
         assert isinstance(session_client, ClientCreatorContext)  # nosec
         exit_stack = contextlib.AsyncExitStack()
-        s3_client = cast(
-            S3Settings, await exit_stack.enter_async_context(session_client)
-        )
-        return cls(s3_client, session, exit_stack)
+        s3_client = cast(S3Client, await exit_stack.enter_async_context(session_client))
+        # NOTE: this triggers a botocore.exceptions.ClientError in case the connection is not made to the S3 backend
+        await s3_client.list_buckets()
+
+        return cls(s3_client, session, exit_stack, s3_max_concurrency)
 
     async def close(self) -> None:
         await self.exit_stack.aclose()
@@ -53,21 +58,19 @@ async def http_check_bucket_connected(self, bucket: S3BucketName) -> bool:
         except Exception:  # pylint: disable=broad-except
             return False
 
-    async def create_presigned_download_link(
+    @s3_exception_handler(_logger)
+    async def create_single_presigned_download_link(
         self,
         bucket_name: S3BucketName,
         object_key: str,
         expiration_secs: int,
     ) -> AnyUrl:
-        try:
-            # NOTE: ensure the bucket/object exists, this will raise if not
-            await self.client.head_bucket(Bucket=bucket_name)
-            generated_link = await self.client.generate_presigned_url(
-                "get_object",
-                Params={"Bucket": bucket_name, "Key": object_key},
-                ExpiresIn=expiration_secs,
-            )
-            url: AnyUrl = parse_obj_as(AnyUrl, generated_link)
-            return url
-        except botocore.exceptions.ClientError as exc:
-            raise S3RuntimeError from exc  # pragma: no cover
+        # NOTE: ensure the bucket/object exists, this will raise if not
+        await self.client.head_bucket(Bucket=bucket_name)
+        generated_link = await self.client.generate_presigned_url(
+            "get_object",
+            Params={"Bucket": bucket_name, "Key": object_key},
+            ExpiresIn=expiration_secs,
+        )
+        url: AnyUrl = parse_obj_as(AnyUrl, generated_link)
+        return url
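
[reviewer note - usage sketch, not part of the diff]
SimcoreS3API now owns its aioboto3 client on an internal AsyncExitStack and
probes connectivity with list_buckets() at creation time, so callers no longer
thread an exit stack through. A minimal usage sketch (assumes S3 credentials
are exported in the environment and that settings-library's create_from_envs()
helper is available; bucket and key names are illustrative only):

    import asyncio

    from aws_library.s3.client import SimcoreS3API
    from settings_library.s3 import S3Settings


    async def main() -> None:
        s3_api = await SimcoreS3API.create(S3Settings.create_from_envs())
        try:
            url = await s3_api.create_single_presigned_download_link(
                bucket_name="my-bucket",
                object_key="data/file.csv",
                expiration_secs=3600,
            )
            print(url)
        finally:
            # close() unwinds the AsyncExitStack holding the client context
            await s3_api.close()


    asyncio.run(main())
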
diff --git a/packages/aws-library/src/aws_library/s3/errors.py b/packages/aws-library/src/aws_library/s3/errors.py
index 39c6de33440..13e60fd0b0b 100644
--- a/packages/aws-library/src/aws_library/s3/errors.py
+++ b/packages/aws-library/src/aws_library/s3/errors.py
@@ -1,3 +1,7 @@
+import functools
+import logging
+
+from botocore import exceptions as botocore_exc
 from pydantic.errors import PydanticErrorMixin
 
 
@@ -7,3 +11,56 @@ class S3RuntimeError(PydanticErrorMixin, RuntimeError):
 
 class S3NotConnectedError(S3RuntimeError):
     msg_template: str = "Cannot connect with s3 server"
+
+
+class S3AccessError(S3RuntimeError):
+    code = "s3_access.error"
+    msg_template: str = "Unexpected error while accessing S3 backend"
+
+
+class S3BucketInvalidError(S3AccessError):
+    code = "s3_bucket.invalid_error"
+    msg_template: str = "The bucket '{bucket}' is invalid"
+
+
+class S3KeyNotFoundError(S3AccessError):
+    code = "s3_key.not_found_error"
+    msg_template: str = "The file {key} in {bucket} was not found"
+
+
+def s3_exception_handler(log: logging.Logger):
+    """converts typical aiobotocore/boto exceptions to storage exceptions
+    NOTE: this is a work in progress as more exceptions might arise in different
+    use-cases
+    """
+
+    def decorator(func):  # noqa: C901
+        @functools.wraps(func)
+        async def wrapper(self, *args, **kwargs):
+            try:
+                return await func(self, *args, **kwargs)
+            except self.client.exceptions.NoSuchBucket as exc:
+                raise S3BucketInvalidError(
+                    bucket=exc.response.get("Error", {}).get("BucketName", "undefined")
+                ) from exc
+            except botocore_exc.ClientError as exc:
+                status_code = int(exc.response.get("Error", {}).get("Code", -1))
+                operation_name = exc.operation_name
+
+                match status_code, operation_name:
+                    case 404, "HeadObject":
+                        raise S3KeyNotFoundError(bucket=args[0], key=args[1]) from exc
+                    case (404, "HeadBucket") | (403, "HeadBucket"):
+                        raise S3BucketInvalidError(bucket=args[0]) from exc
+                    case _:
+                        raise S3AccessError from exc
+            except botocore_exc.EndpointConnectionError as exc:
+                raise S3AccessError from exc
+
+            except botocore_exc.BotoCoreError as exc:
+                log.exception("Unexpected error in s3 client: ")
+                raise S3AccessError from exc
+
+        return wrapper
+
+    return decorator
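
[reviewer note - sketch, not part of the diff]
The decorator dispatches on the numeric "Code" of the botocore error response
together with the operation name, and it assumes the wrapped method receives
bucket (and key) as its first positional arguments, so keyword-only call sites
would break the args[0]/args[1] lookup. A self-contained sketch of the same
mapping, built with botocore's real ClientError constructor:

    from botocore.exceptions import ClientError

    exc = ClientError({"Error": {"Code": "404"}}, operation_name="HeadObject")
    status_code = int(exc.response.get("Error", {}).get("Code", -1))  # -> 404

    match status_code, exc.operation_name:
        case 404, "HeadObject":
            mapped = "S3KeyNotFoundError"  # raised with bucket=args[0], key=args[1]
        case (404, "HeadBucket") | (403, "HeadBucket"):
            mapped = "S3BucketInvalidError"
        case _:
            mapped = "S3AccessError"

    assert mapped == "S3KeyNotFoundError"

Two caveats worth double-checking: the match statement requires Python >= 3.10,
and int() raises ValueError for non-numeric codes such as "NoSuchKey", which
would escape this handler untranslated.
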
diff --git a/packages/aws-library/tests/test_s3_client.py b/packages/aws-library/tests/test_s3_client.py
index 9f3998dcc51..a6ee8c3033f 100644
--- a/packages/aws-library/tests/test_s3_client.py
+++ b/packages/aws-library/tests/test_s3_client.py
@@ -127,7 +127,7 @@ async def upload_file_to_bucket(
 async def test_create_single_presigned_download_link(
     simcore_s3_api: SimcoreS3API, upload_file_to_bucket: None, create_s3_bucket
 ):
-    download_url = await simcore_s3_api.create_presigned_download_link(
+    download_url = await simcore_s3_api.create_single_presigned_download_link(
         create_s3_bucket, "test.csv", 50
     )
     assert isinstance(download_url, AnyUrl)
diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py
index f2628b6c6ad..0f3d4baa1ba 100644
--- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py
+++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/resource_tracker_service_runs.py
@@ -168,7 +168,7 @@ async def export_service_runs(
     )
 
     # Create presigned S3 link
-    generated_url: AnyUrl = await s3_client.create_presigned_download_link(
+    generated_url: AnyUrl = await s3_client.create_single_presigned_download_link(
         bucket_name=s3_bucket_name,
         object_key=s3_object_key,
         expiration_secs=_PRESIGNED_LINK_EXPIRATION_SEC,
diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_service_runs__export.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_service_runs__export.py
index fb669d195e6..c53c1accb90 100644
--- a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_service_runs__export.py
+++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_resource_tracker_service_runs__export.py
@@ -36,7 +36,7 @@ async def mocked_export(mocker: MockerFixture):
 @pytest.fixture
 async def mocked_presigned_link(mocker: MockerFixture):
     mock_presigned_link = mocker.patch(
-        "simcore_service_resource_usage_tracker.services.resource_tracker_service_runs.SimcoreS3API.create_presigned_download_link",
+        "simcore_service_resource_usage_tracker.services.resource_tracker_service_runs.SimcoreS3API.create_single_presigned_download_link",
         return_value=parse_obj_as(
             AnyUrl,
             "https://www.testing.com/",
diff --git a/services/storage/requirements/_base.in b/services/storage/requirements/_base.in
index bf75460b05b..2db40016724 100644
--- a/services/storage/requirements/_base.in
+++ b/services/storage/requirements/_base.in
@@ -5,6 +5,7 @@
 --constraint ../../../requirements/constraints.txt
 
+--requirement ../../../packages/aws-library/requirements/_base.in
 --requirement ../../../packages/models-library/requirements/_base.in
 --requirement ../../../packages/postgres-database/requirements/_base.in
 --requirement ../../../packages/settings-library/requirements/_base.in
diff --git a/services/storage/requirements/_base.txt b/services/storage/requirements/_base.txt
index 83f1aa141fb..08791682dcd 100644
--- a/services/storage/requirements/_base.txt
+++ b/services/storage/requirements/_base.txt
@@ -191,6 +191,7 @@ typing-extensions==4.10.0
     #   pydantic
    #   typer-slim
     #   types-aiobotocore
+    #   types-aiobotocore-ec2
     #   types-aiobotocore-s3
 ujson==5.9.0
     # via aiohttp-swagger
diff --git a/services/storage/requirements/ci.txt b/services/storage/requirements/ci.txt
index 983135c3d6c..f3d92884e0e 100644
--- a/services/storage/requirements/ci.txt
+++ b/services/storage/requirements/ci.txt
@@ -11,6 +11,7 @@
 --requirement _test.txt
 
 # installs this repo's packages
+simcore-aws-library @ ../../packages/aws-library/
 simcore-models-library @ ../../packages/models-library/
 simcore-postgres-database @ ../../packages/postgres-database/
 pytest-simcore @ ../../packages/pytest-simcore/
diff --git a/services/storage/requirements/dev.txt b/services/storage/requirements/dev.txt
index b65f331713c..0b2b3ae2938 100644
--- a/services/storage/requirements/dev.txt
+++ b/services/storage/requirements/dev.txt
@@ -12,6 +12,7 @@
 --requirement _tools.txt
 
 # installs this repo's packages
+--editable ../../packages/aws-library/
 --editable ../../packages/models-library
 --editable ../../packages/postgres-database/
 --editable ../../packages/pytest-simcore/
diff --git a/services/storage/requirements/prod.txt b/services/storage/requirements/prod.txt
index d8b3f5543f8..cd48217e0da 100644
--- a/services/storage/requirements/prod.txt
+++ b/services/storage/requirements/prod.txt
@@ -10,6 +10,7 @@
 --requirement _base.txt
 
 # installs this repo's packages
+simcore-aws-library @ ../../packages/aws-library/
 simcore-models-library @ ../../packages/models-library/
 simcore-postgres-database @ ../../packages/postgres-database/
 simcore-service-library[aiohttp] @ ../../packages/service-library
diff --git a/services/storage/src/simcore_service_storage/exceptions.py b/services/storage/src/simcore_service_storage/exceptions.py
index 53bedbd9584..d41c6d16d75 100644
--- a/services/storage/src/simcore_service_storage/exceptions.py
+++ b/services/storage/src/simcore_service_storage/exceptions.py
@@ -35,18 +35,3 @@ class ProjectNotFoundError(DatabaseAccessError):
 class LinkAlreadyExistsError(DatabaseAccessError):
     code = "link.already_exists_error"
     msg_template: str = "The link {file_id} already exists"
-
-
-class S3AccessError(StorageRuntimeError):
-    code = "s3_access.error"
-    msg_template: str = "Unexpected error while accessing S3 backend"
-
-
-class S3BucketInvalidError(S3AccessError):
-    code = "s3_bucket.invalid_error"
-    msg_template: str = "The bucket '{bucket}' is invalid"
-
-
-class S3KeyNotFoundError(S3AccessError):
-    code = "s3_key.not_found_error"
-    msg_template: str = "The file {key} in {bucket} was not found"
diff --git a/services/storage/src/simcore_service_storage/handlers_health.py b/services/storage/src/simcore_service_storage/handlers_health.py
index 5b6dab3d37d..edd6498220e 100644
--- a/services/storage/src/simcore_service_storage/handlers_health.py
+++ b/services/storage/src/simcore_service_storage/handlers_health.py
@@ -6,6 +6,7 @@
 import logging
 
 from aiohttp import web
+from aws_library.s3.errors import S3AccessError, S3BucketInvalidError
 from models_library.api_schemas_storage import HealthCheck, S3BucketName
 from models_library.app_diagnostics import AppStatusCheck
 from servicelib.json_serialization import json_dumps
@@ -15,7 +16,6 @@
 from .constants import APP_CONFIG_KEY
 from .db import get_engine_state
 from .db import is_service_responsive as is_pg_responsive
-from .exceptions import S3AccessError, S3BucketInvalidError
 from .s3 import get_s3_client
 from .settings import Settings
diff --git a/services/storage/src/simcore_service_storage/s3.py b/services/storage/src/simcore_service_storage/s3.py
index 15e48194372..d529f08c193 100644
--- a/services/storage/src/simcore_service_storage/s3.py
+++ b/services/storage/src/simcore_service_storage/s3.py
@@ -3,7 +3,6 @@
 """
 import json
 import logging
-from contextlib import AsyncExitStack
 from typing import cast
 
 from aiohttp import web
@@ -25,30 +24,30 @@ async def setup_s3_client(app):
     storage_s3_settings = storage_settings.STORAGE_S3
     assert storage_s3_settings  # nosec
 
-    async with AsyncExitStack() as exit_stack:
-        client = None
-        async for attempt in AsyncRetrying(
-            wait=wait_fixed(RETRY_WAIT_SECS),
-            before_sleep=before_sleep_log(log, logging.WARNING),
-            reraise=True,
-        ):
-            with attempt:
-                client = await StorageS3Client.create(
-                    exit_stack,
-                    storage_s3_settings,
-                    storage_settings.STORAGE_S3_CLIENT_MAX_TRANSFER_CONCURRENCY,
-                )
-                log.info(
-                    "S3 client %s successfully created [%s]",
-                    f"{client=}",
-                    json.dumps(attempt.retry_state.retry_object.statistics),
-                )
+    client = None
+    async for attempt in AsyncRetrying(
+        wait=wait_fixed(RETRY_WAIT_SECS),
+        before_sleep=before_sleep_log(log, logging.WARNING),
+        reraise=True,
+    ):
+        with attempt:
+            client = await StorageS3Client.create(
+                storage_s3_settings,
+                storage_settings.STORAGE_S3_CLIENT_MAX_TRANSFER_CONCURRENCY,
+            )
+            log.info(
+                "S3 client %s successfully created [%s]",
+                f"{client=}",
+                json.dumps(attempt.retry_state.retry_object.statistics),
+            )
 
     assert client  # nosec
     app[APP_S3_KEY] = client
 
     yield
 
     # tear-down
     log.debug("closing %s", f"{client=}")
+    await client.close()
+    log.info("closed s3 client %s", f"{client=}")
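
[reviewer note - sketch, not part of the diff]
setup_s3_client follows aiohttp's cleanup-context protocol: the code before
the yield runs at application startup (client creation under tenacity
retries), the code after it runs at shutdown (now an explicit client.close()
instead of the removed AsyncExitStack scope). A minimal sketch of the pattern,
with illustrative names:

    from aiohttp import web


    async def resource_lifecycle(app: web.Application):
        # startup: e.g. client = await StorageS3Client.create(...)
        app["resource"] = "created"
        yield
        # shutdown: e.g. await client.close()
        app["resource"] = None


    def create_app() -> web.Application:
        app = web.Application()
        app.cleanup_ctx.append(resource_lifecycle)
        return app
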
diff --git a/services/storage/src/simcore_service_storage/s3_client.py b/services/storage/src/simcore_service_storage/s3_client.py
index dc4f5f8c8de..0588a06607b 100644
--- a/services/storage/src/simcore_service_storage/s3_client.py
+++ b/services/storage/src/simcore_service_storage/s3_client.py
@@ -3,15 +3,13 @@
 import logging
 import urllib.parse
 from collections.abc import AsyncGenerator, Callable
-from contextlib import AsyncExitStack
 from dataclasses import dataclass
 from pathlib import Path
-from typing import Any, Final, TypeAlias, cast
+from typing import Any, Final, TypeAlias
 
-import aioboto3
-from aiobotocore.session import ClientCreatorContext
+from aws_library.s3.client import SimcoreS3API
+from aws_library.s3.errors import S3KeyNotFoundError, s3_exception_handler
 from boto3.s3.transfer import TransferConfig
-from botocore.client import Config
 from models_library.api_schemas_storage import UploadedPart
 from models_library.basic_types import SHA256Str
 from models_library.projects import ProjectID
@@ -19,8 +17,6 @@
 from pydantic import AnyUrl, ByteSize, NonNegativeInt, parse_obj_as
 from servicelib.logging_utils import log_context
 from servicelib.utils import logged_gather
-from settings_library.s3 import S3Settings
-from simcore_service_storage.exceptions import S3KeyNotFoundError
 from types_aiobotocore_s3 import S3Client
 from types_aiobotocore_s3.type_defs import (
     ListObjectsV2OutputTypeDef,
@@ -30,7 +26,7 @@
 
 from .constants import EXPAND_DIR_MAX_ITEM_COUNT, MULTIPART_UPLOADS_MIN_TOTAL_SIZE
 from .models import ETag, MultiPartUploadLinks, S3BucketName, UploadID
-from .s3_utils import compute_num_file_chunks, s3_exception_handler
+from .s3_utils import compute_num_file_chunks
 
 _logger = logging.getLogger(__name__)
 
@@ -59,9 +55,11 @@ def from_botocore_object(obj: ObjectTypeDef) -> "S3MetaData":
         file_id=SimcoreS3FileID(obj["Key"]),
         last_modified=obj["LastModified"],
         e_tag=json.loads(obj["ETag"]),
-        sha256_checksum=SHA256Str(obj.get("ChecksumSHA256"))
-        if obj.get("ChecksumSHA256")
-        else None,
+        sha256_checksum=(
+            SHA256Str(obj.get("ChecksumSHA256"))
+            if obj.get("ChecksumSHA256")
+            else None
+        ),
         size=obj["Size"],
     )
 
@@ -85,35 +83,7 @@ async def _list_objects_v2_paginated_gen(
         yield items_in_page
 
 
-@dataclass
-class StorageS3Client:  # pylint: disable=too-many-public-methods
-    session: aioboto3.Session
-    client: S3Client
-    transfer_max_concurrency: int
-
-    @classmethod
-    async def create(
-        cls, exit_stack: AsyncExitStack, settings: S3Settings, s3_max_concurrency: int
-    ) -> "StorageS3Client":
-        # upon creation the client does not try to connect, one need to make an operation
-        session = aioboto3.Session()
-        # NOTE: session.client returns an aiobotocore client enhanced with aioboto3 fcts (e.g. download_file, upload_file, copy_file...)
-        session_client = session.client(
-            "s3",
-            endpoint_url=settings.S3_ENDPOINT,
-            aws_access_key_id=settings.S3_ACCESS_KEY,
-            aws_secret_access_key=settings.S3_SECRET_KEY,
-            aws_session_token=settings.S3_ACCESS_TOKEN,
-            region_name=settings.S3_REGION,
-            config=Config(signature_version="s3v4"),
-        )
-        assert isinstance(session_client, ClientCreatorContext)  # nosec
-        client = cast(S3Client, await exit_stack.enter_async_context(session_client))
-        # NOTE: this triggers a botocore.exception.ClientError in case the connection is not made to the S3 backend
-        await client.list_buckets()
-
-        return cls(session, client, s3_max_concurrency)
-
+class StorageS3Client(SimcoreS3API):  # pylint: disable=too-many-public-methods
     @s3_exception_handler(_logger)
     async def create_bucket(self, bucket: S3BucketName) -> None:
         _logger.debug("Creating bucket: %s", bucket)
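
[reviewer note - sketch, not part of the diff]
StorageS3Client now inherits client, session, exit_stack and
transfer_max_concurrency from SimcoreS3API, and because the factory ends in
return cls(...), calling StorageS3Client.create() instantiates the subclass
directly; only storage-specific operations remain here, each wrapped with
s3_exception_handler. The pattern, with a hypothetical extra operation:

    import logging

    from aws_library.s3.client import SimcoreS3API
    from aws_library.s3.errors import s3_exception_handler

    _logger = logging.getLogger(__name__)


    class MyS3Client(SimcoreS3API):
        @s3_exception_handler(_logger)
        async def bucket_exists(self, bucket: str) -> bool:
            # self.client is the types_aiobotocore_s3.S3Client inherited
            # from SimcoreS3API; a 404/403 here surfaces as S3BucketInvalidError
            await self.client.head_bucket(Bucket=bucket)
            return True
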
diff --git a/services/storage/src/simcore_service_storage/s3_utils.py b/services/storage/src/simcore_service_storage/s3_utils.py
index 6ee3ad177d6..0e68973b80e 100644
--- a/services/storage/src/simcore_service_storage/s3_utils.py
+++ b/services/storage/src/simcore_service_storage/s3_utils.py
@@ -1,9 +1,7 @@
-import functools
 import logging
 from dataclasses import dataclass
-from typing import Final, Optional
+from typing import Final
 
-from botocore import exceptions as botocore_exc
 from pydantic import ByteSize, parse_obj_as
 from servicelib.aiohttp.long_running_tasks.server import (
     ProgressMessage,
@@ -11,8 +9,6 @@
     TaskProgress,
 )
 
-from .exceptions import S3AccessError, S3BucketInvalidError, S3KeyNotFoundError
-
 logger = logging.getLogger(__name__)
 
 # this is artifically defined, if possible we keep a maximum number of requests for parallel
@@ -47,49 +43,10 @@ def compute_num_file_chunks(file_size: ByteSize) -> tuple[int, ByteSize]:
     )
 
 
-def s3_exception_handler(log: logging.Logger):
-    """converts typical aiobotocore/boto exceptions to storage exceptions
-    NOTE: this is a work in progress as more exceptions might arise in different
-    use-cases
-    """
-
-    def decorator(func):
-        @functools.wraps(func)
-        async def wrapper(self, *args, **kwargs):
-            try:
-                response = await func(self, *args, **kwargs)
-            except self.client.exceptions.NoSuchBucket as exc:
-                raise S3BucketInvalidError(
-                    bucket=exc.response.get("Error", {}).get("BucketName", "undefined")
-                ) from exc
-            except botocore_exc.ClientError as exc:
-                if exc.response.get("Error", {}).get("Code") == "404":
-                    if exc.operation_name == "HeadObject":
-                        raise S3KeyNotFoundError(bucket=args[0], key=args[1]) from exc
-                    if exc.operation_name == "HeadBucket":
-                        raise S3BucketInvalidError(bucket=args[0]) from exc
-                if exc.response.get("Error", {}).get("Code") == "403":
-                    if exc.operation_name == "HeadBucket":
-                        raise S3BucketInvalidError(bucket=args[0]) from exc
-                raise S3AccessError from exc
-            except botocore_exc.EndpointConnectionError as exc:
-                raise S3AccessError from exc
-
-            except botocore_exc.BotoCoreError as exc:
-                log.exception("Unexpected error in s3 client: ")
-                raise S3AccessError from exc
-
-            return response
-
-        return wrapper
-
-    return decorator
-
-
 def update_task_progress(
-    task_progress: Optional[TaskProgress],
-    message: Optional[ProgressMessage] = None,
-    progress: Optional[ProgressPercent] = None,
+    task_progress: TaskProgress | None,
+    message: ProgressMessage | None = None,
+    progress: ProgressPercent | None = None,
 ) -> None:
     logger.debug("%s [%s]", message or "", progress or "n/a")
     if task_progress:
@@ -98,7 +55,7 @@
 
 @dataclass
 class S3TransferDataCB:
-    task_progress: Optional[TaskProgress]
+    task_progress: TaskProgress | None
     total_bytes_to_transfer: ByteSize
     task_progress_message_prefix: str = ""
     _total_bytes_copied: int = 0
diff --git a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py
index f64b0703fff..c845a5b9c95 100644
--- a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py
+++ b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py
@@ -13,6 +13,7 @@
 from aiohttp import web
 from aiopg.sa import Engine
 from aiopg.sa.connection import SAConnection
+from aws_library.s3.errors import S3KeyNotFoundError
 from models_library.api_schemas_storage import LinkType, S3BucketName, UploadedPart
 from models_library.basic_types import SHA256Str
 from models_library.projects import ProjectID
@@ -57,7 +58,6 @@
     LinkAlreadyExistsError,
     ProjectAccessRightError,
     ProjectNotFoundError,
-    S3KeyNotFoundError,
 )
 from .models import (
     DatasetMetaData,
diff --git a/services/storage/src/simcore_service_storage/utils_handlers.py b/services/storage/src/simcore_service_storage/utils_handlers.py
index d6f83a1fe33..b13c89459e1 100644
--- a/services/storage/src/simcore_service_storage/utils_handlers.py
+++ b/services/storage/src/simcore_service_storage/utils_handlers.py
@@ -3,6 +3,7 @@
 from aiohttp import web
 from aiohttp.typedefs import Handler
 from aiohttp.web_request import Request
+from aws_library.s3.errors import S3AccessError, S3KeyNotFoundError
 from pydantic import ValidationError
 from servicelib.aiohttp.aiopg_utils import DBAPIError
@@ -14,8 +15,6 @@
     LinkAlreadyExistsError,
     ProjectAccessRightError,
     ProjectNotFoundError,
-    S3AccessError,
-    S3KeyNotFoundError,
 )
 
 _logger = logging.getLogger(__name__)
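
[reviewer note - sketch, not part of the diff]
utils_handlers now imports the S3 errors from aws_library, so
dsm_exception_handler keeps translating them into HTTP responses. The concrete
status codes it uses are outside this diff; the sketch below shows one
plausible mapping (404 and 503 are assumptions), written as a standalone
aiohttp middleware:

    from aiohttp import web
    from aws_library.s3.errors import S3AccessError, S3KeyNotFoundError


    @web.middleware
    async def s3_error_middleware(request: web.Request, handler):
        try:
            return await handler(request)
        except S3KeyNotFoundError as exc:
            raise web.HTTPNotFound(reason=f"{exc}") from exc
        except S3AccessError as exc:
            raise web.HTTPServiceUnavailable(reason=f"{exc}") from exc
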
diff --git a/services/storage/tests/unit/test_handlers_files.py b/services/storage/tests/unit/test_handlers_files.py
index 1a7b3f8c5ed..c52bfbf0511 100644
--- a/services/storage/tests/unit/test_handlers_files.py
+++ b/services/storage/tests/unit/test_handlers_files.py
@@ -22,6 +22,7 @@
 from aiohttp import ClientSession
 from aiohttp.test_utils import TestClient
 from aiopg.sa import Engine
+from aws_library.s3.errors import S3KeyNotFoundError
 from faker import Faker
 from models_library.api_schemas_storage import (
     FileMetaDataGet,
@@ -48,7 +49,6 @@
     MULTIPART_UPLOADS_MIN_TOTAL_SIZE,
     S3_UNDEFINED_OR_EXTERNAL_MULTIPART_ID,
 )
-from simcore_service_storage.exceptions import S3KeyNotFoundError
 from simcore_service_storage.handlers_files import UPLOAD_TASKS_KEY
 from simcore_service_storage.models import S3BucketName, UploadID
 from simcore_service_storage.s3_client import StorageS3Client
diff --git a/services/storage/tests/unit/test_handlers_simcore_s3_benchmark.py b/services/storage/tests/unit/test_handlers_simcore_s3_benchmark.py
index 094ef17a26f..5c755252f7a 100644
--- a/services/storage/tests/unit/test_handlers_simcore_s3_benchmark.py
+++ b/services/storage/tests/unit/test_handlers_simcore_s3_benchmark.py
@@ -6,7 +6,7 @@
 import sys
 import time
 from collections.abc import AsyncIterator, Iterable
-from contextlib import AsyncExitStack, asynccontextmanager
+from contextlib import asynccontextmanager
 from itertools import groupby
 from pathlib import Path
 from typing import Any, TypeAlias, TypedDict
@@ -51,21 +51,20 @@ def settings() -> Settings:
 async def benchmark_s3_client(
     benchmark_s3_settings: S3Settings, settings: Settings
 ) -> AsyncIterator[StorageS3Client]:
-    async with AsyncExitStack() as exit_stack:
-        client = await StorageS3Client.create(
-            exit_stack,
-            benchmark_s3_settings,
-            settings.STORAGE_S3_CLIENT_MAX_TRANSFER_CONCURRENCY,
-        )
-        bucket = S3BucketName(benchmark_s3_settings.S3_BUCKET_NAME)
+    client = await StorageS3Client.create(
+        benchmark_s3_settings,
+        settings.STORAGE_S3_CLIENT_MAX_TRANSFER_CONCURRENCY,
+    )
+    bucket = S3BucketName(benchmark_s3_settings.S3_BUCKET_NAME)
 
-        # make sure bucket is empty
-        await client.delete_files_in_path(bucket, prefix="")
+    # make sure bucket is empty
+    await client.delete_files_in_path(bucket, prefix="")
 
-        yield client
+    yield client
 
-        # empty bucket once more when done testing
-        await client.delete_files_in_path(bucket, prefix="")
+    # empty bucket once more when done testing
+    await client.delete_files_in_path(bucket, prefix="")
+    await client.close()
 
 
 @asynccontextmanager
diff --git a/services/storage/tests/unit/test_s3_client.py b/services/storage/tests/unit/test_s3_client.py
index a2bb8dd0748..46b34988e48 100644
--- a/services/storage/tests/unit/test_s3_client.py
+++ b/services/storage/tests/unit/test_s3_client.py
@@ -8,7 +8,6 @@
 import asyncio
 import json
 from collections.abc import AsyncIterator, Awaitable, Callable
-from contextlib import AsyncExitStack
 from dataclasses import dataclass
 from pathlib import Path
 from random import choice
@@ -18,6 +17,11 @@
 import botocore.exceptions
 import pytest
 from aiohttp import ClientSession
+from aws_library.s3.errors import (
+    S3AccessError,
+    S3BucketInvalidError,
+    S3KeyNotFoundError,
+)
 from faker import Faker
 from models_library.api_schemas_storage import UploadedPart
 from models_library.basic_types import SHA256Str
@@ -28,11 +32,6 @@
 from pytest_mock import MockFixture
 from pytest_simcore.helpers.utils_envs import EnvVarsDict
 from pytest_simcore.helpers.utils_parametrizations import byte_size_ids
-from simcore_service_storage.exceptions import (
-    S3AccessError,
-    S3BucketInvalidError,
-    S3KeyNotFoundError,
-)
 from simcore_service_storage.models import MultiPartUploadLinks, S3BucketName
 from simcore_service_storage.s3_client import (
     StorageS3Client,
@@ -56,17 +55,19 @@ def mock_config(
     monkeypatch.setenv("STORAGE_POSTGRES", "null")
 
 
-async def test_storage_storage_s3_client_creation(app_settings: Settings):
+async def test_storage_storage_s3_client_creation(
+    app_settings: Settings,
+):
     assert app_settings.STORAGE_S3
-    async with AsyncExitStack() as exit_stack:
-        storage_s3_client = await StorageS3Client.create(
-            exit_stack,
-            app_settings.STORAGE_S3,
-            app_settings.STORAGE_S3_CLIENT_MAX_TRANSFER_CONCURRENCY,
-        )
-        assert storage_s3_client
-        response = await storage_s3_client.client.list_buckets()
-        assert not response["Buckets"]
+    storage_s3_client = await StorageS3Client.create(
+        app_settings.STORAGE_S3,
+        app_settings.STORAGE_S3_CLIENT_MAX_TRANSFER_CONCURRENCY,
+    )
+    assert storage_s3_client
+    response = await storage_s3_client.client.list_buckets()
+    assert not response["Buckets"]
+
+    await storage_s3_client.close()
 
     with pytest.raises(botocore.exceptions.HTTPClientError):
         await storage_s3_client.client.list_buckets()
@@ -76,19 +77,17 @@
 async def storage_s3_client(
     app_settings: Settings,
 ) -> AsyncIterator[StorageS3Client]:
     assert app_settings.STORAGE_S3
-    async with AsyncExitStack() as exit_stack:
-        storage_s3_client = await StorageS3Client.create(
-            exit_stack,
-            app_settings.STORAGE_S3,
-            app_settings.STORAGE_S3_CLIENT_MAX_TRANSFER_CONCURRENCY,
-        )
-        # check that no bucket is lying around
-        assert storage_s3_client
-        response = await storage_s3_client.client.list_buckets()
-        assert not response[
-            "Buckets"
-        ], f"for testing puproses, there should be no bucket lying around! {response=}"
-        yield storage_s3_client
+    storage_s3_client = await StorageS3Client.create(
+        app_settings.STORAGE_S3,
+        app_settings.STORAGE_S3_CLIENT_MAX_TRANSFER_CONCURRENCY,
+    )
+    # check that no bucket is lying around
+    assert storage_s3_client
+    response = await storage_s3_client.client.list_buckets()
+    assert not response[
+        "Buckets"
+    ], f"for testing purposes, there should be no bucket lying around! {response=}"
+    yield storage_s3_client
 
 
 async def test_create_bucket(storage_s3_client: StorageS3Client, faker: Faker):
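
[reviewer note - sketch, not part of the diff]
With the AsyncExitStack gone, the storage_s3_client fixture above ends at
yield storage_s3_client and, as far as this hunk shows, never closes the
client. If teardown is not handled elsewhere, closing after the yield would
keep aiobotocore connections from leaking across tests:

    from collections.abc import AsyncIterator

    import pytest


    @pytest.fixture
    async def storage_s3_client(
        app_settings: Settings,  # same Settings fixture as in this module
    ) -> AsyncIterator[StorageS3Client]:
        client = await StorageS3Client.create(
            app_settings.STORAGE_S3,
            app_settings.STORAGE_S3_CLIENT_MAX_TRANSFER_CONCURRENCY,
        )
        yield client
        await client.close()
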
diff --git a/services/storage/tests/unit/test_utils_handlers.py b/services/storage/tests/unit/test_utils_handlers.py
index 3ad1e27446c..f471126ca06 100644
--- a/services/storage/tests/unit/test_utils_handlers.py
+++ b/services/storage/tests/unit/test_utils_handlers.py
@@ -6,6 +6,7 @@
 import pytest
 from aiohttp import web
 from aiohttp.typedefs import Handler
+from aws_library.s3.errors import S3KeyNotFoundError
 from pydantic import BaseModel, ValidationError
 from pytest_mock import MockerFixture
 from servicelib.aiohttp.aiopg_utils import DBAPIError
@@ -15,7 +16,6 @@
     FileMetaDataNotFoundError,
     ProjectAccessRightError,
     ProjectNotFoundError,
-    S3KeyNotFoundError,
 )
 from simcore_service_storage.utils_handlers import dsm_exception_handler