diff --git a/ci/github/system-testing/swarm-deploy.bash b/ci/github/system-testing/swarm-deploy.bash
index 40de3730a56..36c5fbd1f0f 100755
--- a/ci/github/system-testing/swarm-deploy.bash
+++ b/ci/github/system-testing/swarm-deploy.bash
@@ -25,10 +25,11 @@ test() {
   # WARNING: this test is heavy. Due to limited CI machine power, please do not
   # add too much overhead (e.g. low log-level etc)
   pytest \
+    --asyncio-mode=auto \
     --color=yes \
-    -v \
     --durations=5 \
     --log-level=INFO \
+    -v \
     tests/swarm-deploy
 }
diff --git a/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py b/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py
index b5bb2f35b53..b3a647b4c89 100644
--- a/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py
+++ b/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py
@@ -49,7 +49,7 @@ def _is_docker_swarm_init(docker_client: docker.client.DockerClient) -> bool:
     before_sleep=before_sleep_log(log, logging.WARNING),
     reraise=True,
 )
-def assert_service_is_running(service):
+def assert_service_is_running(service) -> None:
     """Checks that a number of tasks of this service are in running state"""

     def _get(obj: dict[str, Any], dotted_key: str, default=None) -> Any:
@@ -101,7 +101,6 @@ def _fetch_and_print_services(
     print(HEADER_STR.format(f"docker services running {extra_title}"))

     for service_obj in docker_client.services.list():
-        tasks = {}
         service = {}

         with suppress(Exception):
@@ -179,6 +178,56 @@ def docker_swarm(
     assert _is_docker_swarm_init(docker_client) is keep_docker_up


+def _force_restart_migration_service(docker_client: docker.client.DockerClient) -> None:
+    for migration_service in (
+        service
+        for service in docker_client.services.list()
+        if "migration" in service.name
+    ):
+        print(
+            "WARNING: migration service detected before updating stack, it will be force-updated"
+        )
+        migration_service.force_update()
+        print(f"forced updated {migration_service.name}.")
+
+
+def _deploy_stack(compose_file: Path, stack_name: str) -> None:
+    for attempt in Retrying(
+        stop=stop_after_delay(60),
+        wait=wait_random_exponential(max=5),
+        retry=retry_if_exception_type(TryAgain),
+        reraise=True,
+    ):
+        with attempt:
+            try:
+                subprocess.run(
+                    [
+                        "docker",
+                        "stack",
+                        "deploy",
+                        "--with-registry-auth",
+                        "--compose-file",
+                        f"{compose_file.name}",
+                        f"{stack_name}",
+                    ],
+                    check=True,
+                    cwd=compose_file.parent,
+                    capture_output=True,
+                )
+            except subprocess.CalledProcessError as err:
+                if b"update out of sequence" in err.stderr:
+                    raise TryAgain from err
+                print(
+                    "docker_stack failed",
+                    f"{' '.join(err.cmd)}",
+                    f"returncode={err.returncode}",
+                    f"stdout={err.stdout}",
+                    f"stderr={err.stderr}",
+                    "\nTIP: frequent failure is due to a corrupt .env file: Delete .env and .env.bak",
+                )
+                raise
+
+
 @pytest.fixture(scope="module")
 def docker_stack(
     docker_swarm: None,
@@ -211,54 +260,12 @@ def docker_stack(
     # NOTE: if the migration service was already running prior to this call it must
     # be force updated so that it does its job. else it remains and tests will fail
-    for migration_service in (
-        service
-        for service in docker_client.services.list()
-        if "migration" in service.name  # type: ignore
-    ):
-        print(
-            "WARNING: migration service detected before updating stack, it will be force-updated"
-        )
-        migration_service.force_update()  # type: ignore
-        print(f"forced updated {migration_service.name}.")  # type: ignore
+    _force_restart_migration_service(docker_client)

     # make up-version
     stacks_deployed: dict[str, dict] = {}
     for key, stack_name, compose_file in stacks:
-        for attempt in Retrying(
-            stop=stop_after_delay(60),
-            wait=wait_random_exponential(max=5),
-            retry=retry_if_exception_type(TryAgain),
-            reraise=True,
-        ):
-            with attempt:
-                try:
-                    subprocess.run(
-                        [
-                            "docker",
-                            "stack",
-                            "deploy",
-                            "--with-registry-auth",
-                            "--compose-file",
-                            f"{compose_file.name}",
-                            f"{stack_name}",
-                        ],
-                        check=True,
-                        cwd=compose_file.parent,
-                        capture_output=True,
-                    )
-                except subprocess.CalledProcessError as err:
-                    if b"update out of sequence" in err.stderr:
-                        raise TryAgain from err
-                    print(
-                        "docker_stack failed",
-                        f"{' '.join(err.cmd)}",
-                        f"returncode={err.returncode}",
-                        f"stdout={err.stdout}",
-                        f"stderr={err.stderr}",
-                        "\nTIP: frequent failure is due to a corrupt .env file: Delete .env and .env.bak",
-                    )
-                    raise
+        _deploy_stack(compose_file, stack_name)

         stacks_deployed[key] = {
             "name": stack_name,
@@ -281,6 +288,11 @@ async def _check_all_services_are_running():
             return_when=asyncio.FIRST_EXCEPTION,
         )
         assert done, f"no services ready, they all failed! [{pending}]"
+
+        for future in done:
+            if exc := future.exception():
+                raise exc
+
         assert not pending, f"some service did not start correctly [{pending}]"

     asyncio.run(_check_all_services_are_running())
@@ -318,7 +330,6 @@ async def _check_all_services_are_running():
     stacks.reverse()
     for _, stack, _ in stacks:
-
         try:
             subprocess.run(
                 f"docker stack remove {stack}".split(" "),
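Note: in the `_check_all_services_are_running` hunk above, `asyncio.wait(..., return_when=asyncio.FIRST_EXCEPTION)` never raises by itself; a failed future simply lands in `done` with its exception stored, so without the added re-raise loop a crashed readiness check would be silently ignored. A minimal, self-contained sketch of that pattern (the `_check` coroutine and service names are hypothetical):

    import asyncio

    async def _check(service_name: str) -> str:
        # hypothetical readiness probe; raises if the service never comes up
        if service_name == "migration":
            raise RuntimeError(f"{service_name} did not become ready")
        return service_name

    async def _check_all() -> None:
        tasks = [
            asyncio.create_task(_check(name))
            for name in ("postgres", "webserver", "migration")
        ]
        done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION)
        assert done, f"no services ready, they all failed! [{pending}]"
        for future in done:
            if exc := future.exception():
                for task in pending:
                    task.cancel()  # drop the rest before surfacing the failure
                raise exc
        assert not pending, f"some service did not start correctly [{pending}]"

    asyncio.run(_check_all())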
diff --git a/services/dask-sidecar/requirements/_base.txt b/services/dask-sidecar/requirements/_base.txt
index 4daca3110bf..a46d87c31eb 100644
--- a/services/dask-sidecar/requirements/_base.txt
+++ b/services/dask-sidecar/requirements/_base.txt
@@ -159,7 +159,9 @@ pyyaml==5.4.1
     #   dask-gateway
     #   distributed
 redis==4.5.4
-    # via -r requirements/../../../packages/service-library/requirements/_base.in
+    # via
+    #   -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
+    #   -r requirements/../../../packages/service-library/requirements/_base.in
 requests==2.28.2
     # via fsspec
 s3fs==2023.3.0
diff --git a/services/dask-sidecar/requirements/_test.in b/services/dask-sidecar/requirements/_test.in
index edb8920c5e1..703c87e18d4 100644
--- a/services/dask-sidecar/requirements/_test.in
+++ b/services/dask-sidecar/requirements/_test.in
@@ -11,9 +11,9 @@
 coverage
 docker
 faker
-minio
+moto[server]
 pytest
-pytest-aiohttp # incompatible with pytest-asyncio. See https://github.com/pytest-dev/pytest-asyncio/issues/76
+pytest-aiohttp
 pytest-cov
 pytest-icdiff
 pytest-instafail
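Note: the `minio` → `moto[server]` swap above is what lets the MinIO ops container be dropped from the unit tests further down. The `server` extra ships an in-process mock S3 endpoint; a minimal sketch of the idea, assuming moto 4.x and boto3 (port, bucket name, and credentials below are arbitrary, moto accepts any):

    import boto3
    from moto.server import ThreadedMotoServer  # provided by moto[server]

    server = ThreadedMotoServer(port=9000)  # serves a mock AWS API on localhost:9000
    server.start()
    try:
        s3 = boto3.client(
            "s3",
            endpoint_url="http://127.0.0.1:9000",
            aws_access_key_id="xxx",
            aws_secret_access_key="xxx",
            region_name="us-east-1",
        )
        s3.create_bucket(Bucket="simcore-test")
        assert [b["Name"] for b in s3.list_buckets()["Buckets"]] == ["simcore-test"]
    finally:
        server.stop()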
diff --git a/services/dask-sidecar/requirements/_test.txt b/services/dask-sidecar/requirements/_test.txt
index 619e3092f85..f3d4ccd3ebd 100644
--- a/services/dask-sidecar/requirements/_test.txt
+++ b/services/dask-sidecar/requirements/_test.txt
@@ -20,18 +20,43 @@ attrs==21.4.0
     # via
     #   -c requirements/_base.txt
     #   aiohttp
+    #   jschema-to-python
+    #   jsonschema
+    #   sarif-om
+aws-sam-translator==1.66.0
+    # via cfn-lint
+aws-xray-sdk==2.12.0
+    # via moto
+blinker==1.6.2
+    # via flask
+boto3==1.24.59
+    # via
+    #   aws-sam-translator
+    #   moto
+botocore==1.27.59
+    # via
+    #   -c requirements/_base.txt
+    #   aws-xray-sdk
+    #   boto3
+    #   moto
+    #   s3transfer
 certifi==2022.12.7
     # via
     #   -c requirements/_base.txt
-    #   minio
     #   requests
 cffi==1.15.1
     # via cryptography
+cfn-lint==0.77.4
+    # via moto
 charset-normalizer==3.1.0
     # via
     #   -c requirements/_base.txt
     #   aiohttp
     #   requests
+click==8.1.3
+    # via
+    #   -c requirements/_base.txt
+    #   flask
 coverage==7.2.3
     # via
     #   -r requirements/_test.in
@@ -39,18 +64,36 @@ coverage==7.2.3
 cryptography==40.0.2
     # via
     #   -c requirements/../../../requirements/constraints.txt
+    #   moto
     #   pyopenssl
+    #   python-jose
+    #   sshpubkeys
 docker==6.0.1
-    # via -r requirements/_test.in
+    # via
+    #   -r requirements/_test.in
+    #   moto
+ecdsa==0.18.0
+    # via
+    #   moto
+    #   python-jose
+    #   sshpubkeys
 exceptiongroup==1.1.1
     # via pytest
 faker==18.4.0
     # via -r requirements/_test.in
+flask==2.3.2
+    # via
+    #   flask-cors
+    #   moto
+flask-cors==3.0.10
+    # via moto
 frozenlist==1.3.3
     # via
     #   -c requirements/_base.txt
     #   aiohttp
     #   aiosignal
+graphql-core==3.2.3
+    # via moto
 icdiff==2.0.6
     # via pytest-icdiff
 idna==3.4
@@ -60,29 +103,93 @@ idna==3.4
     #   yarl
 iniconfig==2.0.0
     # via pytest
-minio==7.0.4
+itsdangerous==2.1.2
+    # via flask
+jinja2==3.1.2
+    # via
+    #   -c requirements/_base.txt
+    #   flask
+    #   moto
+jmespath==1.0.1
+    # via
+    #   -c requirements/_base.txt
+    #   boto3
+    #   botocore
+jschema-to-python==1.2.3
+    # via cfn-lint
+jsondiff==2.0.0
+    # via moto
+jsonpatch==1.32
+    # via cfn-lint
+jsonpickle==3.0.1
+    # via jschema-to-python
+jsonpointer==2.3
+    # via jsonpatch
+jsonschema==3.2.0
+    # via
+    #   -c requirements/_base.txt
+    #   aws-sam-translator
+    #   cfn-lint
+    #   openapi-schema-validator
+    #   openapi-spec-validator
+junit-xml==1.9
+    # via cfn-lint
+markupsafe==2.1.2
+    # via
+    #   -c requirements/_base.txt
+    #   jinja2
+    #   werkzeug
+moto==4.1.8
     # via -r requirements/_test.in
+mpmath==1.3.0
+    # via sympy
 multidict==6.0.4
     # via
     #   -c requirements/_base.txt
     #   aiohttp
     #   yarl
+networkx==3.1
+    # via cfn-lint
+openapi-schema-validator==0.2.3
+    # via openapi-spec-validator
+openapi-spec-validator==0.4.0
+    # via moto
 packaging==23.0
     # via
     #   -c requirements/_base.txt
     #   docker
     #   pytest
     #   pytest-sugar
+pbr==5.11.1
+    # via
+    #   jschema-to-python
+    #   sarif-om
 pluggy==1.0.0
     # via pytest
 pprintpp==0.4.0
     # via pytest-icdiff
+py-partiql-parser==0.3.0
+    # via moto
+pyasn1==0.5.0
+    # via
+    #   python-jose
+    #   rsa
 pycparser==2.21
     # via cffi
+pydantic==1.10.2
+    # via
+    #   -c requirements/_base.txt
+    #   aws-sam-translator
 pyftpdlib==1.5.7
     # via pytest-localftpserver
 pyopenssl==23.1.1
     # via pytest-localftpserver
+pyparsing==3.0.9
+    # via moto
+pyrsistent==0.19.3
+    # via
+    #   -c requirements/_base.txt
+    #   jsonschema
 pytest==7.3.1
     # via
     #   -r requirements/_test.in
@@ -113,32 +220,86 @@ pytest-sugar==0.9.7
 python-dateutil==2.8.2
     # via
     #   -c requirements/_base.txt
+    #   botocore
     #   faker
+    #   moto
 python-dotenv==1.0.0
     # via -r requirements/_test.in
+python-jose==3.3.0
+    # via moto
+pyyaml==5.4.1
+    # via
+    #   -c requirements/_base.txt
+    #   cfn-lint
+    #   moto
+    #   openapi-spec-validator
+    #   responses
+regex==2023.5.2
+    # via cfn-lint
 requests==2.28.2
     # via
     #   -c requirements/_base.txt
     #   docker
+    #   moto
+    #   responses
+responses==0.23.1
+    # via moto
+rsa==4.9
+    # via
+    #   -c requirements/../../../requirements/constraints.txt
+    #   python-jose
+s3transfer==0.6.0
+    # via boto3
+sarif-om==1.0.4
+    # via cfn-lint
 six==1.16.0
     # via
     #   -c requirements/_base.txt
+    #   ecdsa
+    #   flask-cors
+    #   jsonschema
+    #   junit-xml
     #   python-dateutil
+sshpubkeys==3.3.1
+    # via moto
+sympy==1.11.1
+    # via cfn-lint
 termcolor==2.2.0
     # via pytest-sugar
 tomli==2.0.1
     # via
     #   coverage
     #   pytest
+types-pyyaml==6.0.12.9
+    # via responses
+typing-extensions==4.5.0
+    # via
+    #   -c requirements/_base.txt
+    #   aws-sam-translator
+    #   pydantic
 urllib3==1.26.14
     # via
     #   -c requirements/_base.txt
+    #   botocore
     #   docker
-    #   minio
     #   requests
+    #   responses
 websocket-client==1.5.1
     # via docker
+werkzeug==2.3.3
+    # via
+    #   flask
+    #   moto
+wrapt==1.15.0
+    # via
+    #   -c requirements/_base.txt
+    #   aws-xray-sdk
+xmltodict==0.13.0
+    # via moto
 yarl==1.8.2
     # via
     #   -c requirements/_base.txt
     #   aiohttp
+
+# The following packages are considered to be unsafe in a requirements file:
+# setuptools
diff --git a/services/dask-sidecar/requirements/_tools.txt b/services/dask-sidecar/requirements/_tools.txt
index 6c2a2608d58..eafa5126fea 100644
--- a/services/dask-sidecar/requirements/_tools.txt
+++ b/services/dask-sidecar/requirements/_tools.txt
@@ -16,7 +16,7 @@ cfgv==3.3.1
     # via pre-commit
 click==8.1.3
     # via
-    #   -c requirements/_base.txt
+    #   -c requirements/_test.txt
     #   black
     #   pip-tools
 dill==0.3.6
@@ -61,7 +61,7 @@ pyproject-hooks==1.0.0
     # via build
 pyyaml==5.4.1
     # via
-    #   -c requirements/_base.txt
+    #   -c requirements/_test.txt
     #   pre-commit
     #   watchdog
 tomli==2.0.1
@@ -75,7 +75,7 @@ tomlkit==0.11.7
     # via pylint
 typing-extensions==4.5.0
     # via
-    #   -c requirements/_base.txt
+    #   -c requirements/_test.txt
     #   astroid
 virtualenv==20.21.0
     # via pre-commit
@@ -85,7 +85,7 @@ wheel==0.40.0
     # via pip-tools
 wrapt==1.15.0
     # via
-    #   -c requirements/_base.txt
+    #   -c requirements/_test.txt
     #   astroid

 # The following packages are considered to be unsafe in a requirements file:
diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/docker_utils.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/docker_utils.py
index 6d9d008d105..cf064e278bb 100644
--- a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/docker_utils.py
+++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/docker_utils.py
@@ -170,7 +170,7 @@ async def parse_line(line: str) -> tuple[LogType, str, str]:
     return (log_type, timestamp, log)


-async def publish_container_logs(
+async def _publish_container_logs(
     service_key: str,
     service_version: str,
     container: DockerContainer,
@@ -213,7 +213,7 @@ async def _parse_container_log_file(
     while (await container.show())["State"]["Running"]:
         if line := await file_pointer.readline():
             log_type, _, message = await parse_line(line)
-            await publish_container_logs(
+            await _publish_container_logs(
                 service_key=service_key,
                 service_version=service_version,
                 container=container,
@@ -228,7 +228,7 @@ async def _parse_container_log_file(
     # finish reading the logs if possible
     async for line in file_pointer:
         log_type, _, message = await parse_line(line)
-        await publish_container_logs(
+        await _publish_container_logs(
             service_key=service_key,
             service_version=service_version,
             container=container,
@@ -277,7 +277,6 @@ async def _parse_container_docker_logs(
         container.id,
         container_name,
     )
-    # TODO: move that file somewhere else
     async with aiofiles.tempfile.TemporaryDirectory() as tmp_dir:
         log_file_path = (
             Path(tmp_dir) / f"{service_key.split(sep='/')[-1]}_{service_version}.logs"
@@ -290,7 +289,7 @@ async def _parse_container_docker_logs(
         ):
             await log_fp.write(log_line.encode("utf-8"))
             log_type, latest_log_timestamp, message = await parse_line(log_line)
-            await publish_container_logs(
+            await _publish_container_logs(
                 service_key=service_key,
                 service_version=service_version,
                 container=container,
@@ -321,7 +320,7 @@ async def _parse_container_docker_logs(
             for log_line in missing_logs:
                 await log_fp.write(log_line.encode("utf-8"))
                 log_type, latest_log_timestamp, message = await parse_line(log_line)
-                await publish_container_logs(
+                await _publish_container_logs(
                     service_key=service_key,
                     service_version=service_version,
                     container=container,
diff --git a/services/dask-sidecar/tests/unit/conftest.py b/services/dask-sidecar/tests/unit/conftest.py
index c9f4ede9854..a5f27e7850a 100644
--- a/services/dask-sidecar/tests/unit/conftest.py
+++ b/services/dask-sidecar/tests/unit/conftest.py
@@ -5,15 +5,15 @@
 from pathlib import Path
 from pprint import pformat
-from typing import Any, Callable, Iterable, Iterator, Optional
+from typing import AsyncIterator, Callable, Iterable, Iterator

 import dask
 import distributed
 import fsspec
 import pytest
 import simcore_service_dask_sidecar
+from aiobotocore.session import AioBaseClient, get_session
 from faker import Faker
-from minio import Minio
 from pydantic import AnyUrl, parse_obj_as
 from pytest import MonkeyPatch, TempPathFactory
 from pytest_localftpserver.servers import ProcessFTPServer
@@ -23,11 +23,11 @@
 from yarl import URL

 pytest_plugins = [
+    "pytest_simcore.aws_services",
     "pytest_simcore.docker_compose",
     "pytest_simcore.docker_registry",
     "pytest_simcore.docker_swarm",
     "pytest_simcore.environment_configs",
-    "pytest_simcore.minio_service",
     "pytest_simcore.monkeypatch_extra",
     "pytest_simcore.pytest_global_environs",
     "pytest_simcore.repository_paths",
@@ -53,12 +53,11 @@ def installed_package_dir() -> Path:
 @pytest.fixture()
 def mock_service_envs(
-    mock_env_devel_environment: dict[str, Optional[str]],
+    mock_env_devel_environment: dict[str, str | None],
     monkeypatch: MonkeyPatch,
     mocker: MockerFixture,
     tmp_path_factory: TempPathFactory,
 ) -> None:
-
     # Variables directly define inside Dockerfile
     monkeypatch.setenv("SC_BOOT_MODE", "debug-ptvsd")
@@ -107,24 +106,55 @@ def ftp_server(ftpserver: ProcessFTPServer) -> list[URL]:


 @pytest.fixture
-def s3_endpoint_url(minio_config: dict[str, Any]) -> AnyUrl:
+def s3_settings(mocked_s3_server_envs: None) -> S3Settings:
+    return S3Settings.create_from_envs()
+
+
+@pytest.fixture
+def s3_endpoint_url(s3_settings: S3Settings) -> AnyUrl:
     return parse_obj_as(
         AnyUrl,
-        f"http{'s' if minio_config['client']['secure'] else ''}://{minio_config['client']['endpoint']}",
+        f"{s3_settings.S3_ENDPOINT}",
     )


 @pytest.fixture
-def s3_settings(minio_config: dict[str, Any], minio_service: Minio) -> S3Settings:
-    return S3Settings.create_from_envs()
+async def aiobotocore_s3_client(
+    s3_settings: S3Settings, s3_endpoint_url: AnyUrl
+) -> AsyncIterator[AioBaseClient]:
+    session = get_session()
+    async with session.create_client(
+        "s3",
+        endpoint_url=f"{s3_endpoint_url}",
+        aws_secret_access_key="xxx",
+        aws_access_key_id="xxx",
+    ) as client:
+        yield client
+
+
+@pytest.fixture
+async def bucket(
+    aiobotocore_s3_client: AioBaseClient, s3_settings: S3Settings
+) -> AsyncIterator[str]:
+    response = await aiobotocore_s3_client.create_bucket(
+        Bucket=s3_settings.S3_BUCKET_NAME
+    )
+    assert "ResponseMetadata" in response
+    assert "HTTPStatusCode" in response["ResponseMetadata"]
+    assert response["ResponseMetadata"]["HTTPStatusCode"] == 200
+
+    response = await aiobotocore_s3_client.list_buckets()
+    assert response["Buckets"]
+    assert len(response["Buckets"]) == 1
+    bucket_name = response["Buckets"][0]["Name"]
+    yield bucket_name
+    # await _clean_bucket_content(aiobotocore_s3_client, bucket_name)


 @pytest.fixture
-def s3_remote_file_url(
-    minio_config: dict[str, Any], faker: Faker
-) -> Callable[..., AnyUrl]:
-    def creator(file_path: Optional[Path] = None) -> AnyUrl:
-        file_path_with_bucket = Path(minio_config["bucket_name"]) / (
+def s3_remote_file_url(s3_settings: S3Settings, faker: Faker) -> Callable[..., AnyUrl]:
+    def creator(file_path: Path | None = None) -> AnyUrl:
+        file_path_with_bucket = Path(s3_settings.S3_BUCKET_NAME) / (
             file_path or faker.file_name()
         )
         return parse_obj_as(AnyUrl, f"s3://{file_path_with_bucket}")
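Note: with `--asyncio-mode=auto` (enabled in the CI script at the top of this diff), async tests and fixtures need no explicit marker. An illustrative sketch of how a test can consume the `aiobotocore_s3_client` and `bucket` fixtures added above (the test body itself is hypothetical):

    async def test_s3_roundtrip(aiobotocore_s3_client, bucket: str):
        # write and read back a small object through the mocked S3 endpoint
        await aiobotocore_s3_client.put_object(
            Bucket=bucket, Key="hello.txt", Body=b"hi"
        )
        response = await aiobotocore_s3_client.get_object(Bucket=bucket, Key="hello.txt")
        assert await response["Body"].read() == b"hi"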
diff --git a/services/dask-sidecar/tests/unit/test_file_utils.py b/services/dask-sidecar/tests/unit/test_file_utils.py
index 8a0512146fd..6f16e962779 100644
--- a/services/dask-sidecar/tests/unit/test_file_utils.py
+++ b/services/dask-sidecar/tests/unit/test_file_utils.py
@@ -7,13 +7,12 @@
 import zipfile
 from dataclasses import dataclass
 from pathlib import Path
-from typing import Any, AsyncIterable, Optional, cast
+from typing import Any, AsyncIterable, cast
 from unittest import mock

 import fsspec
 import pytest
 from faker import Faker
-from minio import Minio
 from pydantic import AnyUrl, parse_obj_as
 from pytest import FixtureRequest
 from pytest_localftpserver.servers import ProcessFTPServer
@@ -35,16 +34,12 @@ async def mocked_log_publishing_cb(
     yield mocked_callback


-pytest_simcore_core_services_selection = [
-    "postgres"
-]  # TODO: unnecessary but test framework requires it, only minio is useful here
-pytest_simcore_ops_services_selection = ["minio"]
+pytest_simcore_core_services_selection = ["postgres"]
+pytest_simcore_ops_services_selection = []


 @pytest.fixture
-def s3_presigned_link_storage_kwargs(
-    minio_config: dict[str, Any], minio_service: Minio
-) -> dict[str, Any]:
+def s3_presigned_link_storage_kwargs(s3_settings: S3Settings) -> dict[str, Any]:
     return {}

@@ -56,30 +51,29 @@ def ftp_remote_file_url(ftpserver: ProcessFTPServer, faker: Faker) -> AnyUrl:


 @pytest.fixture
-def s3_presigned_link_remote_file_url(
-    minio_config: dict[str, Any],
-    minio_service: Minio,
+async def s3_presigned_link_remote_file_url(
+    s3_settings: S3Settings,
+    aiobotocore_s3_client,
     faker: Faker,
 ) -> AnyUrl:
-
     return parse_obj_as(
         AnyUrl,
-        minio_service.presigned_put_object(
-            minio_config["bucket_name"], faker.file_name()
+        await aiobotocore_s3_client.generate_presigned_url(
+            "put_object",
+            Params={"Bucket": s3_settings.S3_BUCKET_NAME, "Key": faker.file_name()},
+            ExpiresIn=30,
         ),
     )


 @pytest.fixture
-def s3_remote_file_url(minio_config: dict[str, Any], faker: Faker) -> AnyUrl:
-    return parse_obj_as(
-        AnyUrl, f"s3://{minio_config['bucket_name']}{faker.file_path()}"
-    )
+def s3_remote_file_url(s3_settings: S3Settings, faker: Faker) -> AnyUrl:
+    return parse_obj_as(AnyUrl, f"s3://{s3_settings.S3_BUCKET_NAME}{faker.file_path()}")


 @dataclass(frozen=True)
 class StorageParameters:
-    s3_settings: Optional[S3Settings]
+    s3_settings: S3Settings | None
     remote_file_url: AnyUrl
@@ -139,7 +133,7 @@ async def test_push_file_to_remote(
 async def test_push_file_to_remote_s3_http_presigned_link(
     s3_presigned_link_remote_file_url: AnyUrl,
     s3_settings: S3Settings,
-    minio_config: dict[str, Any],
+    bucket: str,
     tmp_path: Path,
     faker: Faker,
     mocked_log_publishing_cb: mock.AsyncMock,
@@ -243,8 +237,8 @@ async def test_pull_file_from_remote(
 async def test_pull_file_from_remote_s3_presigned_link(
     s3_settings: S3Settings,
     s3_remote_file_url: AnyUrl,
-    minio_service: Minio,
-    minio_config: dict[str, Any],
+    aiobotocore_s3_client,
+    bucket: str,
     tmp_path: Path,
     faker: Faker,
     mocked_log_publishing_cb: mock.AsyncMock,
@@ -266,9 +260,13 @@ async def test_pull_file_from_remote_s3_presigned_link(
     assert s3_remote_file_url.path
     remote_file_url = parse_obj_as(
         AnyUrl,
-        minio_service.presigned_get_object(
-            minio_config["bucket_name"],
-            s3_remote_file_url.path.removeprefix(f"/{minio_config['bucket_name']}/"),
+        await aiobotocore_s3_client.generate_presigned_url(
+            "get_object",
+            Params={
+                "Bucket": s3_settings.S3_BUCKET_NAME,
+                "Key": s3_remote_file_url.path.removeprefix("/"),
+            },
+            ExpiresIn=30,
         ),
     )
     # now let's get the file through the util
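Note: the presigned-link fixtures above matter because a presigned URL carries its authorization in the query string, so the code under test can move data with a plain HTTP client and no S3 credentials. An illustrative sketch of that property (the function names are hypothetical):

    import requests

    def upload_via_presigned_put(presigned_url: str, payload: bytes) -> None:
        # no AWS credentials needed; the signature is embedded in the URL
        response = requests.put(presigned_url, data=payload, timeout=5)
        response.raise_for_status()

    def download_via_presigned_get(presigned_url: str) -> bytes:
        response = requests.get(presigned_url, timeout=5)
        response.raise_for_status()
        return response.content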
diff --git a/services/dask-sidecar/tests/unit/test_tasks.py b/services/dask-sidecar/tests/unit/test_tasks.py
index 9e392fcc7f4..e34552b7aef 100644
--- a/services/dask-sidecar/tests/unit/test_tasks.py
+++ b/services/dask-sidecar/tests/unit/test_tasks.py
@@ -32,6 +32,7 @@
     TaskOutputDataSchema,
 )
 from distributed import Client
+from faker import Faker
 from models_library.projects import ProjectID
 from models_library.projects_nodes_io import NodeID
 from models_library.services_resources import BootMode
@@ -78,7 +79,7 @@ def node_id() -> NodeID:


 @pytest.fixture()
-def dask_subsystem_mock(mocker: MockerFixture) -> dict[str, MockerFixture]:
+def dask_subsystem_mock(mocker: MockerFixture) -> dict[str, mock.Mock]:
     # mock dask client
     dask_client_mock = mocker.patch("distributed.Client", autospec=True)
@@ -141,7 +142,7 @@ class ServiceExampleParam:

 pytest_simcore_core_services_selection = ["postgres"]
-pytest_simcore_ops_services_selection = ["minio"]
+pytest_simcore_ops_services_selection = []


 def _bash_check_env_exist(variable_name: str, variable_value: str) -> list[str]:
@@ -175,6 +176,7 @@ def ubuntu_task(
     file_on_s3_server: Callable[..., AnyUrl],
     s3_remote_file_url: Callable[..., AnyUrl],
     boot_mode: BootMode,
+    faker: Faker,
 ) -> ServiceExampleParam:
     """Creates a console task in an ubuntu distro that checks for the expected files and error in case they are missing"""
     # let's have some input files on the file server
@@ -241,6 +243,7 @@ def ubuntu_task(
     )

     list_of_commands += [
+        f"echo '{faker.text(max_nb_chars=17216)}'",
         f"(test -f ${{INPUT_FOLDER}}/{input_json_file_name} || (echo ${{INPUT_FOLDER}}/{input_json_file_name} file does not exists && exit 1))",
         f"echo $(cat ${{INPUT_FOLDER}}/{input_json_file_name})",
         f"sleep {randint(1,4)}",
@@ -372,7 +375,7 @@ def test_run_computational_sidecar_real_fct(
     caplog_info_level: LogCaptureFixture,
     event_loop: asyncio.AbstractEventLoop,
     mock_service_envs: None,
-    dask_subsystem_mock: dict[str, MockerFixture],
+    dask_subsystem_mock: dict[str, mock.Mock],
     ubuntu_task: ServiceExampleParam,
     mocker: MockerFixture,
     s3_settings: S3Settings,
@@ -401,7 +404,7 @@ def test_run_computational_sidecar_real_fct(
         ubuntu_task.service_version,
     )
     for event in [TaskProgressEvent, TaskLogEvent]:
-        dask_subsystem_mock["dask_event_publish"].assert_any_call(  # type: ignore
+        dask_subsystem_mock["dask_event_publish"].assert_any_call(
             name=event.topic_name()
         )
@@ -560,7 +563,7 @@ def test_failing_service_raises_exception(
     caplog_info_level: LogCaptureFixture,
     event_loop: asyncio.AbstractEventLoop,
     mock_service_envs: None,
-    dask_subsystem_mock: dict[str, MockerFixture],
+    dask_subsystem_mock: dict[str, mock.Mock],
     ubuntu_task_fail: ServiceExampleParam,
     s3_settings: S3Settings,
 ):
@@ -584,7 +587,7 @@ def test_running_service_that_generates_unexpected_data_raises_exception(
     caplog_info_level: LogCaptureFixture,
     event_loop: asyncio.AbstractEventLoop,
     mock_service_envs: None,
-    dask_subsystem_mock: dict[str, MockerFixture],
+    dask_subsystem_mock: dict[str, mock.Mock],
     ubuntu_task_unexpected_output: ServiceExampleParam,
     s3_settings: S3Settings,
 ):
dask_subsystem_mock["dask_event_publish"].assert_any_call( name=event.topic_name() ) @@ -560,7 +563,7 @@ def test_failing_service_raises_exception( caplog_info_level: LogCaptureFixture, event_loop: asyncio.AbstractEventLoop, mock_service_envs: None, - dask_subsystem_mock: dict[str, MockerFixture], + dask_subsystem_mock: dict[str, mock.Mock], ubuntu_task_fail: ServiceExampleParam, s3_settings: S3Settings, ): @@ -584,7 +587,7 @@ def test_running_service_that_generates_unexpected_data_raises_exception( caplog_info_level: LogCaptureFixture, event_loop: asyncio.AbstractEventLoop, mock_service_envs: None, - dask_subsystem_mock: dict[str, MockerFixture], + dask_subsystem_mock: dict[str, mock.Mock], ubuntu_task_unexpected_output: ServiceExampleParam, s3_settings: S3Settings, ): diff --git a/tests/swarm-deploy/conftest.py b/tests/swarm-deploy/conftest.py index 8d7f7d700dd..112be2f3aad 100644 --- a/tests/swarm-deploy/conftest.py +++ b/tests/swarm-deploy/conftest.py @@ -88,7 +88,6 @@ def simcore_stack_deployed_services( core_stack_compose_specs: ComposeSpec, docker_client: DockerClient, ) -> list[Service]: - # NOTE: the goal here is NOT to test time-to-deploy but # rather guaranteing that the framework is fully deployed before starting # tests. Obviously in a critical state in which the frameworks has a problem @@ -137,12 +136,14 @@ def simcore_stack_deployed_services( # OPS stack ----------------------------------- +_REQUIRED_OPS_SERVICES = ["minio"] + @pytest.fixture(scope="module") def ops_services_selection(ops_docker_compose: ComposeSpec) -> list[ServiceNameStr]: ## OVERRIDES packages/pytest-simcore/src/pytest_simcore/docker_compose.py::ops_services_selection - # select ALL services for these tests - return list(ops_docker_compose["services"].keys()) + # select only minio for these tests + return _REQUIRED_OPS_SERVICES @pytest.fixture(scope="module") diff --git a/tests/swarm-deploy/requirements/_test.txt b/tests/swarm-deploy/requirements/_test.txt index 76a6c1d2d2a..dec5e9bc77d 100644 --- a/tests/swarm-deploy/requirements/_test.txt +++ b/tests/swarm-deploy/requirements/_test.txt @@ -165,7 +165,9 @@ pyyaml==5.4.1 # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/_test.in redis==4.5.4 - # via -r requirements/../../../packages/service-library/requirements/_base.in + # via + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_base.in requests==2.28.2 # via # -r requirements/../../../packages/postgres-database/requirements/_migration.txt diff --git a/tests/swarm-deploy/test_frontend_served.py b/tests/swarm-deploy/test_frontend_served.py index ecd8a9b6e8b..ff671ecd50e 100644 --- a/tests/swarm-deploy/test_frontend_served.py +++ b/tests/swarm-deploy/test_frontend_served.py @@ -13,6 +13,12 @@ from yarl import URL +async def test_deployed_services_running( + simcore_stack_deployed_services: list[Service], +): + ... 
diff --git a/tests/swarm-deploy/test_stack_deploy.py b/tests/swarm-deploy/test_stack_deploy.py
deleted file mode 100644
index 8f6dce8fdb1..00000000000
--- a/tests/swarm-deploy/test_stack_deploy.py
+++ /dev/null
@@ -1,225 +0,0 @@
-# pylint: disable=redefined-outer-name
-# pylint: disable=unused-argument
-# pylint: disable=unused-variable
-
-import asyncio
-import json
-import logging
-import subprocess
-import sys
-from pathlib import Path
-
-import aiodocker
-import docker
-import pytest
-import yaml
-from pytest_simcore.helpers.constants import MINUTE
-from pytest_simcore.helpers.typing_docker import ServiceDict, TaskDict, UrlStr
-from pytest_simcore.helpers.typing_tenacity import TenacityStatsDict
-from pytest_simcore.helpers.utils_dict import copy_from_dict, get_from_dict
-from tenacity._asyncio import AsyncRetrying
-from tenacity.before_sleep import before_sleep_log
-from tenacity.stop import stop_after_delay
-from tenacity.wait import wait_random
-
-#
-
-log = logging.getLogger(__name__)
-
-
-async def assert_service_is_running(
-    service_id: str, docker, *, max_running_delay=1 * MINUTE
-) -> tuple[list[TaskDict], TenacityStatsDict]:
-    MAX_WAIT = 5
-    assert max_running_delay > 3 * MAX_WAIT
-
-    #
-    # The retry-policy constraints in this test
-    # the time a service takes since it is deployed by the swarm
-    # until it is running (i.e. started and healthy)
-    #
-    retry_policy = dict(
-        # instead of wait_fix in order to help parallel execution in asyncio.gather
-        wait=wait_random(1, MAX_WAIT),
-        stop=stop_after_delay(max_running_delay),
-        before_sleep=before_sleep_log(log, logging.INFO),
-        reraise=True,
-    )
-
-    async for attempt in AsyncRetrying(**retry_policy):
-        with attempt:
-
-            # service
-            service: ServiceDict = await docker.services.inspect(service_id)
-
-            assert service_id == service["ID"]
-
-            service_name = service["Spec"]["Name"]
-            num_replicas = int(
-                get_from_dict(service, "Spec.Mode.Replicated.Replicas", default=1)
-            )
-
-            # tasks in a service
-            tasks: list[TaskDict] = await docker.tasks.list(
-                filters={"service": service_name}
-            )
-
-            tasks_current_state = [task["Status"]["State"] for task in tasks]
-            num_running = sum(current == "running" for current in tasks_current_state)
-
-            # assert condition
-            is_running: bool = num_replicas == num_running
-
-            error_msg = ""
-            if not is_running:
-                # lazy composes error msg
-                logs_lines = await docker.services.logs(
-                    service_id,
-                    follow=False,
-                    timestamps=True,
-                    tail=50,  # SEE *_docker_logs artifacts for details
-                )
-                log_str = " ".join(logs_lines)
-                tasks_json = json.dumps(
-                    [
-                        copy_from_dict(
-                            task,
-                            include={
-                                "ID": ...,
-                                "CreatedAt": ...,
-                                "UpdatedAt": ...,
-                                "Spec": {"ContainerSpec": {"Image"}},
-                                "Status": {"Timestamp", "State"},
-                                "DesiredState": ...,
-                            },
-                        )
-                        for task in tasks
-                    ],
-                    indent=1,
-                )
" - f"Details:\n" - f"tasks={tasks_json}\n" - f"logs={log_str}\n" - ) - - assert is_running, error_msg - - log.info( - "Connection to %s succeded [%s]", - service_name, - json.dumps(attempt.retry_state.retry_object.statistics), - ) - - return tasks, attempt.retry_state.retry_object.statistics - assert False # never reached - - -# - - -@pytest.fixture -async def docker_async_client(): - client = aiodocker.Docker() - try: - yield client - finally: - await client.close() - - -@pytest.fixture(scope="module") -def core_stack_services_names( - core_docker_compose_file: Path, core_stack_namespace: str -) -> list[str]: - """Expected names of service in core stack at runtime""" - spec_service_names = yaml.safe_load(core_docker_compose_file.read_text())[ - "services" - ].keys() - return sorted(f"{core_stack_namespace}_{s}" for s in spec_service_names) - - -@pytest.fixture(scope="module") -def docker_stack_core_and_ops( - docker_registry: UrlStr, - docker_swarm: None, - docker_client: docker.client.DockerClient, - core_docker_compose_file: Path, - ops_docker_compose_file: Path, - core_stack_namespace: str, - ops_stack_namespace: str, -): - - for key, stack_name, compose_file in [ - ( - "core", - core_stack_namespace, - core_docker_compose_file, - ), - ( - "ops", - ops_stack_namespace, - ops_docker_compose_file, - ), - ]: - print(f"deploying {key}", "-" * 10) - subprocess.run( - f"docker stack deploy --with-registry-auth -c {compose_file.name} {stack_name}", - shell=True, - check=True, - cwd=compose_file.parent, - ) - subprocess.run(f"docker stack ps {stack_name}", shell=True, check=False) - - -# - - -async def test_core_services_running( - docker_stack_core_and_ops: None, - core_stack_namespace: str, - docker_async_client: aiodocker.Docker, - core_stack_services_names: list[str], -): - docker = docker_async_client - - # check expected services deployed - core_services: list[ServiceDict] = await docker.services.list( - filters={"label": f"com.docker.stack.namespace={core_stack_namespace}"} - ) - assert core_services - assert sorted(s["Spec"]["Name"] for s in core_services) == core_stack_services_names - - # check every service is running - results = await asyncio.gather( - *( - assert_service_is_running( - service["ID"], - docker, - # delay adjusted for github-actions runners - max_running_delay=10 * MINUTE, - ) - for service in core_services - ), - # otherwise, the first service failing will stop - return_exceptions=True, - ) - - try: - assert not any(isinstance(r, Exception) for r in results) - - finally: - print("test_core_services_running stats", "-" * 10) - # TODO: dump stats in artifacts to monitor startup performance - for res, service in zip(results, core_services): - print(f"{service['Spec']['Name']:-^50}") - print( - res if isinstance(res, Exception) else json.dumps(res[1]), - ) - - -if __name__ == "__main__": - # NOTE: use in vscode "Run and Debug" -> select 'Python: Current File' - sys.exit( - pytest.main(["-vv", "-s", "--pdb", "--log-cli-level=WARNING", sys.argv[0]]) - )