diff --git a/api/tests/test_individual_openapi_schemas.py b/api/tests/test_individual_openapi_schemas.py index df522deda5a..89e8de87065 100644 --- a/api/tests/test_individual_openapi_schemas.py +++ b/api/tests/test_individual_openapi_schemas.py @@ -7,8 +7,6 @@ from pathlib import Path import pytest -from openapi_spec_validator import validate_spec -from openapi_spec_validator.exceptions import OpenAPISpecValidatorError from utils import dump_specs, is_json_schema, is_openapi_schema, load_specs # Conventions @@ -132,15 +130,3 @@ def converted_specs_testdir(api_specs_dir, all_api_specs_tails, tmpdir_factory): shutil.copy2(basedir / tail, testdir / tail) return testdir - - -@pytest.mark.skip(reason="Implementing in PR 324") -def test_valid_individual_openapi_specs(api_specs_tail, converted_specs_testdir): - # NOTE: api_specs_tail is a parametrized **fixture** - # - api_specs_path = converted_specs_testdir / api_specs_tail - try: - specs = load_specs(api_specs_path) - validate_spec(specs, spec_url=api_specs_path.as_uri()) - except OpenAPISpecValidatorError as err: - pytest.fail(f"Failed validating {api_specs_path}:\n{err.message}") diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_docker.py b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_docker.py index 29b81200a7e..eda49ff4dc4 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_docker.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_docker.py @@ -9,7 +9,6 @@ import docker import yaml -from pytest_simcore.helpers.typing_env import EnvVarsDict from tenacity import retry from tenacity.after import after_log from tenacity.stop import stop_after_attempt @@ -61,17 +60,19 @@ def get_service_published_port( services = [s for s in client.services.list() if str(s.name).endswith(service_name)] if not services: - raise RuntimeError( + msg = ( f"Cannot find published port for service '{service_name}'." "Probably services still not started." ) + raise RuntimeError(msg) service_ports = services[0].attrs["Endpoint"].get("Ports") if not service_ports: - raise RuntimeError( + msg = ( f"Cannot find published port for service '{service_name}' in endpoint." "Probably services still not started." ) + raise RuntimeError(msg) published_port = None msg = ", ".join( @@ -89,7 +90,7 @@ def get_service_published_port( else: ports_to_look_for: list = ( - [target_ports] if isinstance(target_ports, (int, str)) else target_ports + [target_ports] if isinstance(target_ports, int | str) else target_ports ) for target_port in ports_to_look_for: @@ -100,7 +101,8 @@ def get_service_published_port( break if published_port is None: - raise RuntimeError(f"Cannot find published port for {target_ports}. Got {msg}") + msg = f"Cannot find published port for {target_ports}. 
Got {msg}" + raise RuntimeError(msg) return str(published_port) @@ -111,7 +113,6 @@ def run_docker_compose_config( project_dir: Path, env_file_path: Path, destination_path: Path | None = None, - additional_envs: EnvVarsDict | None = None, ) -> dict: """Runs docker compose config to validate and resolve a compose file configuration @@ -140,13 +141,12 @@ def run_docker_compose_config( ], "Expected yaml/yml file as destination path" # SEE https://docs.docker.com/compose/reference/ - - global_options = [ + bash_options = [ "-p", str(project_dir), # Specify an alternate working directory ] # https://docs.docker.com/compose/environment-variables/#using-the---env-file--option - global_options += [ + bash_options += [ "-e", str(env_file_path), # Custom environment variables ] @@ -155,26 +155,22 @@ def run_docker_compose_config( # - When you use multiple Compose files, all paths in the files are relative to the first configuration file specified with -f. # You can use the --project-directory option to override this base path. for docker_compose_path in docker_compose_paths: - global_options += [os.path.relpath(docker_compose_path, project_dir)] + bash_options += [os.path.relpath(docker_compose_path, project_dir)] # SEE https://docs.docker.com/compose/reference/config/ docker_compose_path = scripts_dir / "docker" / "docker-compose-config.bash" assert docker_compose_path.exists() - cmd = [f"{docker_compose_path}"] + global_options - print(" ".join(cmd)) - - process_environment_variables = dict(os.environ) - if additional_envs: - process_environment_variables |= additional_envs + args = [f"{docker_compose_path}", *bash_options] + print(" ".join(args)) process = subprocess.run( - cmd, + args, shell=False, - check=True, cwd=project_dir, capture_output=True, - env=process_environment_variables, + check=True, + env=None, # NOTE: Do not use since since we pass all necessary env vars via --env-file option of docker compose ) compose_file_str = process.stdout.decode("utf-8") diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_postgres.py b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_postgres.py index 3da3a3974c6..a54b02cfd1c 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_postgres.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_postgres.py @@ -1,6 +1,7 @@ import logging +from collections.abc import Iterator from contextlib import contextmanager -from typing import Iterator, TypedDict +from typing import TypedDict import simcore_postgres_database.cli import sqlalchemy as sa diff --git a/packages/service-library/setup.py b/packages/service-library/setup.py index d853bc865fd..1870b1ba4c7 100644 --- a/packages/service-library/setup.py +++ b/packages/service-library/setup.py @@ -30,23 +30,23 @@ def read_reqs(reqs_path: Path) -> set[str]: SETUP = { + "name": "simcore-service-library", + "version": Path(CURRENT_DIR / "VERSION").read_text().strip(), "author": "Pedro Crespo-Valero (pcrespov)", "description": "Core service library for simcore (or servicelib)", + "license": "MIT license", + "python_requires": "~=3.10", + "install_requires": tuple(PROD_REQUIREMENTS), + "packages": find_packages(where="src"), + "package_dir": {"": "src"}, + "test_suite": "tests", + "tests_require": tuple(TEST_REQUIREMENTS), "extras_require": { "aiohttp": tuple(AIOHTTP_REQUIREMENTS), - "all": tuple(AIOHTTP_REQUIREMENTS | FASTAPI_REQUIREMENTS), "fastapi": tuple(FASTAPI_REQUIREMENTS), + "all": tuple(AIOHTTP_REQUIREMENTS | FASTAPI_REQUIREMENTS), "test": 
tuple(TEST_REQUIREMENTS), }, - "install_requires": tuple(PROD_REQUIREMENTS), - "license": "MIT license", - "name": "simcore-service-library", - "package_dir": {"": "src"}, - "packages": find_packages(where="src"), - "python_requires": "~=3.10", - "test_suite": "tests", - "tests_require": tuple(TEST_REQUIREMENTS), - "version": Path(CURRENT_DIR / "VERSION").read_text().strip(), } diff --git a/packages/service-library/tests/fastapi/test_httpx_utils.py b/packages/service-library/tests/fastapi/test_httpx_utils.py index 3d37a77988e..8c206cc034f 100644 --- a/packages/service-library/tests/fastapi/test_httpx_utils.py +++ b/packages/service-library/tests/fastapi/test_httpx_utils.py @@ -62,7 +62,7 @@ async def test_to_curl_command(client: AsyncClient): assert ( cmd_short - == f'curl -X POST -H "host: test_base_http_api" -H "accept: */*" -H "accept-encoding: gzip, deflate" -H "connection: keep-alive" -H "user-agent: python-httpx/0.25.0" -H "x-secret: {_PLACEHOLDER}" -H "content-length: 9" -H "content-type: application/json" -d \'{{"y": 12}}\' https://test_base_http_api/foo?x=3' + == f'curl -X POST -H "host: test_base_http_api" -H "accept: */*" -H "accept-encoding: gzip, deflate" -H "connection: keep-alive" -H "user-agent: python-httpx/{httpx.__version__}" -H "x-secret: {_PLACEHOLDER}" -H "content-length: 9" -H "content-type: application/json" -d \'{{"y": 12}}\' https://test_base_http_api/foo?x=3' ) cmd_long = to_curl_command(response.request, use_short_options=False) @@ -81,14 +81,14 @@ async def test_to_curl_command(client: AsyncClient): assert ( cmd_multiline == textwrap.dedent( - """\ + f"""\ curl \\ -X GET \\ -H "host: test_base_http_api" \\ -H "accept: */*" \\ -H "accept-encoding: gzip, deflate" \\ -H "connection: keep-alive" \\ - -H "user-agent: python-httpx/0.25.0" \\ + -H "user-agent: python-httpx/{httpx.__version__}" \\ https://test_base_http_api/foo?x=3 """ ).strip() @@ -115,5 +115,5 @@ async def test_to_httpx_command(client: AsyncClient): print(cmd_short) assert ( cmd_short - == f'httpx -m POST -c \'{{"y": 12}}\' -h "host" "test_base_http_api" -h "accept" "*/*" -h "accept-encoding" "gzip, deflate" -h "connection" "keep-alive" -h "user-agent" "python-httpx/0.25.0" -h "x-secret" "{_PLACEHOLDER}" -h "content-length" "9" -h "content-type" "application/json" https://test_base_http_api/foo?x=3' + == f'httpx -m POST -c \'{{"y": 12}}\' -h "host" "test_base_http_api" -h "accept" "*/*" -h "accept-encoding" "gzip, deflate" -h "connection" "keep-alive" -h "user-agent" "python-httpx/{httpx.__version__}" -h "x-secret" "{_PLACEHOLDER}" -h "content-length" "9" -h "content-type" "application/json" https://test_base_http_api/foo?x=3' ) diff --git a/packages/simcore-sdk/setup.py b/packages/simcore-sdk/setup.py index d8d36cb39cf..e856827ac39 100644 --- a/packages/simcore-sdk/setup.py +++ b/packages/simcore-sdk/setup.py @@ -1,12 +1,11 @@ import re import sys from pathlib import Path -from typing import Set from setuptools import find_packages, setup -def read_reqs(reqs_path: Path) -> Set[str]: +def read_reqs(reqs_path: Path) -> set[str]: return { r for r in re.findall( @@ -31,17 +30,17 @@ def read_reqs(reqs_path: Path) -> Set[str]: } ) -SETUP = dict( - name="simcore-sdk", - version=Path(CURRENT_DIR / "VERSION").read_text().strip(), - packages=find_packages(where="src"), - package_dir={"": "src"}, - python_requires=">=3.6", - install_requires=INSTALL_REQUIREMENTS, - tests_require=TEST_REQUIREMENTS, - extras_require={"test": TEST_REQUIREMENTS}, - test_suite="tests", -) +SETUP = { + "name": "simcore-sdk", + 
"version": Path(CURRENT_DIR / "VERSION").read_text().strip(), + "packages": find_packages(where="src"), + "package_dir": {"": "src"}, + "python_requires": ">=3.6", + "install_requires": INSTALL_REQUIREMENTS, + "tests_require": TEST_REQUIREMENTS, + "extras_require": {"test": TEST_REQUIREMENTS}, + "test_suite": "tests", +} if __name__ == "__main__": diff --git a/packages/simcore-sdk/tests/conftest.py b/packages/simcore-sdk/tests/conftest.py index e02cf703a1f..88f9b296442 100644 --- a/packages/simcore-sdk/tests/conftest.py +++ b/packages/simcore-sdk/tests/conftest.py @@ -19,8 +19,8 @@ pytest_plugins = [ - "pytest_simcore.aws_server", "pytest_simcore.aws_s3_service", + "pytest_simcore.aws_server", "pytest_simcore.docker_compose", "pytest_simcore.docker_swarm", "pytest_simcore.file_extra", @@ -42,30 +42,6 @@ def package_dir(): return pdir -@pytest.fixture(scope="session") -def osparc_simcore_root_dir() -> Path: - """osparc-simcore repo root dir""" - WILDCARD = "packages/simcore-sdk" - - root_dir = Path(current_dir) - while not any(root_dir.glob(WILDCARD)) and root_dir != Path("/"): - root_dir = root_dir.parent - - msg = f"'{root_dir}' does not look like the git root directory of osparc-simcore" - assert root_dir.exists(), msg - assert any(root_dir.glob(WILDCARD)), msg - assert any(root_dir.glob(".git")), msg - - return root_dir - - -@pytest.fixture(scope="session") -def env_devel_file(osparc_simcore_root_dir) -> Path: - env_devel_fpath = osparc_simcore_root_dir / ".env-devel" - assert env_devel_fpath.exists() - return env_devel_fpath - - @pytest.fixture(scope="session") def default_configuration_file() -> Path: path = current_dir / "mock" / "default_config.json" @@ -75,8 +51,7 @@ def default_configuration_file() -> Path: @pytest.fixture(scope="session") def default_configuration(default_configuration_file: Path) -> dict[str, Any]: - config = json.loads(default_configuration_file.read_text()) - return config + return json.loads(default_configuration_file.read_text()) @pytest.fixture(scope="session") diff --git a/packages/simcore-sdk/tests/integration/conftest.py b/packages/simcore-sdk/tests/integration/conftest.py index ac0d1693c2b..20510ff7f23 100644 --- a/packages/simcore-sdk/tests/integration/conftest.py +++ b/packages/simcore-sdk/tests/integration/conftest.py @@ -39,10 +39,10 @@ def user_id(postgres_db: sa.engine.Engine) -> Iterable[UserID]: # which would turn this test too complex. 
# pylint: disable=no-value-for-parameter - stmt = users.insert().values(**random_user(name="test")).returning(users.c.id) - print(f"{stmt}") with postgres_db.connect() as conn: - result = conn.execute(stmt) + result = conn.execute( + users.insert().values(**random_user(name="test")).returning(users.c.id) + ) row = result.first() assert row usr_id = row[users.c.id] diff --git a/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py b/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py new file mode 100644 index 00000000000..81941fb5887 --- /dev/null +++ b/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py @@ -0,0 +1,293 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + + +import hashlib +import shutil +import zipfile +from collections.abc import Callable +from pathlib import Path +from uuid import uuid4 + +import pytest +from faker import Faker +from models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID, SimcoreS3FileID +from models_library.users import UserID +from pydantic import parse_obj_as +from servicelib.progress_bar import ProgressBarData +from settings_library.r_clone import RCloneSettings +from simcore_sdk.node_data import data_manager +from simcore_sdk.node_ports_common import filemanager +from simcore_sdk.node_ports_common.constants import SIMCORE_LOCATION +from simcore_sdk.node_ports_common.file_io_utils import LogRedirectCB + +pytest_simcore_core_services_selection = [ + "migration", + "postgres", + "storage", +] + +pytest_simcore_ops_services_selection = [ + "minio", + "adminer", +] + + +# UTILS + + +def _empty_path(path: Path) -> None: + if path.is_file(): + path.unlink() + assert path.exists() is False + path.touch() + assert path.exists() is True + else: + shutil.rmtree(path) + assert path.exists() is False + path.mkdir(parents=True, exist_ok=True) + assert path.exists() is True + + +def _get_file_hashes_in_path(path_to_hash: Path) -> set[tuple[Path, str]]: + def _hash_path(path: Path): + sha256_hash = hashlib.sha256() + with Path.open(path, "rb") as f: + # Read and update hash string value in blocks of 4K + for byte_block in iter(lambda: f.read(4096), b""): + sha256_hash.update(byte_block) + return sha256_hash.hexdigest() + + def _relative_path(root_path: Path, full_path: Path) -> Path: + return full_path.relative_to(root_path) + + if path_to_hash.is_file(): + return {(_relative_path(path_to_hash, path_to_hash), _hash_path(path_to_hash))} + + return { + (_relative_path(path_to_hash, path), _hash_path(path)) + for path in path_to_hash.rglob("*") + } + + +def _make_file_with_content(file_path: Path) -> Path: + content = " ".join(f"{uuid4()}" for x in range(10)) + file_path.write_text(content) + assert file_path.exists() + return file_path + + +def _make_dir_with_files(temp_dir: Path, file_count: int) -> Path: + assert file_count > 0 + + content_dir_path = temp_dir / f"content_dir{uuid4()}" + content_dir_path.mkdir(parents=True, exist_ok=True) + + for _ in range(file_count): + _make_file_with_content(file_path=content_dir_path / f"{uuid4()}_test.txt") + + return content_dir_path + + +def _zip_directory(dir_to_compress: Path, destination: Path) -> None: + dir_to_compress = Path(dir_to_compress) + destination = Path(destination) + + with zipfile.ZipFile(destination, "w", zipfile.ZIP_DEFLATED) as zipf: + for file_path in dir_to_compress.glob("**/*"): 
+ if file_path.is_file(): + zipf.write(file_path, file_path.relative_to(dir_to_compress)) + + +@pytest.fixture +def temp_dir(tmpdir: Path) -> Path: + return Path(tmpdir) + + +@pytest.fixture +def random_tmp_dir_generator(temp_dir: Path) -> Callable[[bool], Path]: + def generator(is_file: bool) -> Path: + random_dir_path = temp_dir / f"{uuid4()}" + random_dir_path.mkdir(parents=True, exist_ok=True) + if is_file: + file_path = random_dir_path / f"{uuid4()}_test.txt" + file_path.touch() + return file_path + + return random_dir_path + + return generator + + +@pytest.fixture +def project_id(project_id: str) -> ProjectID: + return ProjectID(project_id) + + +@pytest.fixture +def node_uuid(faker: Faker) -> NodeID: + return NodeID(faker.uuid4()) + + +@pytest.fixture(params=["dir_content_one_file_path", "dir_content_multiple_files_path"]) +def content_path(request: pytest.FixtureRequest, temp_dir: Path) -> Path: + match request.param: + case "dir_content_one_file_path": + return _make_dir_with_files(temp_dir, file_count=1) + case "dir_content_multiple_files_path": + return _make_dir_with_files(temp_dir, file_count=2) + case _: + pytest.fail("Undefined content_param") + + +async def test_valid_upload_download( + node_ports_config: None, + content_path: Path, + user_id: UserID, + project_id: ProjectID, + node_uuid: NodeID, + r_clone_settings: RCloneSettings, + mock_io_log_redirect_cb: LogRedirectCB, +): + async with ProgressBarData(num_steps=2) as progress_bar: + await data_manager._push_directory( # noqa: SLF001 + user_id=user_id, + project_id=project_id, + node_uuid=node_uuid, + source_path=content_path, + io_log_redirect_cb=mock_io_log_redirect_cb, + progress_bar=progress_bar, + r_clone_settings=r_clone_settings, + ) + assert progress_bar._current_steps == pytest.approx(1.0) # noqa: SLF001 + + uploaded_hashes = _get_file_hashes_in_path(content_path) + + _empty_path(content_path) + + await data_manager._pull_directory( # noqa: SLF001 + user_id=user_id, + project_id=project_id, + node_uuid=node_uuid, + destination_path=content_path, + io_log_redirect_cb=mock_io_log_redirect_cb, + r_clone_settings=r_clone_settings, + progress_bar=progress_bar, + ) + assert progress_bar._current_steps == pytest.approx(2.0) # noqa: SLF001 + + downloaded_hashes = _get_file_hashes_in_path(content_path) + + assert uploaded_hashes == downloaded_hashes + + +async def test_valid_upload_download_saved_to( + node_ports_config, + content_path: Path, + user_id: UserID, + project_id: ProjectID, + node_uuid: NodeID, + random_tmp_dir_generator: Callable, + r_clone_settings: RCloneSettings, + mock_io_log_redirect_cb: LogRedirectCB, +): + async with ProgressBarData(num_steps=2) as progress_bar: + await data_manager._push_directory( # noqa: SLF001 + user_id=user_id, + project_id=project_id, + node_uuid=node_uuid, + source_path=content_path, + io_log_redirect_cb=mock_io_log_redirect_cb, + progress_bar=progress_bar, + r_clone_settings=r_clone_settings, + ) + # pylint: disable=protected-access + assert progress_bar._current_steps == pytest.approx(1) # noqa: SLF001 + + uploaded_hashes = _get_file_hashes_in_path(content_path) + + _empty_path(content_path) + + new_destination = random_tmp_dir_generator(is_file=content_path.is_file()) + + await data_manager._pull_directory( # noqa: SLF001 + user_id=user_id, + project_id=project_id, + node_uuid=node_uuid, + destination_path=content_path, + save_to=new_destination, + io_log_redirect_cb=mock_io_log_redirect_cb, + r_clone_settings=r_clone_settings, + progress_bar=progress_bar, + ) + assert 
progress_bar._current_steps == pytest.approx(2)  # noqa: SLF001
+
+    downloaded_hashes = _get_file_hashes_in_path(new_destination)
+
+    assert uploaded_hashes == downloaded_hashes
+
+
+async def test_delete_legacy_archive(
+    node_ports_config,
+    content_path: Path,
+    user_id: UserID,
+    project_id: ProjectID,
+    node_uuid: NodeID,
+    r_clone_settings: RCloneSettings,
+    temp_dir: Path,
+):
+    async with ProgressBarData(num_steps=2) as progress_bar:
+        # NOTE: legacy archives can no longer be created, so a
+        # "legacy style archive" is generated manually here
+        archive_into_dir = temp_dir / f"legacy-archive-dir-{uuid4()}"
+        archive_into_dir.mkdir(parents=True, exist_ok=True)
+        legacy_archive_name = archive_into_dir / f"{content_path.stem}.zip"
+        _zip_directory(dir_to_compress=content_path, destination=legacy_archive_name)
+
+        await filemanager.upload_path(
+            user_id=user_id,
+            store_id=SIMCORE_LOCATION,
+            store_name=None,
+            s3_object=parse_obj_as(
+                SimcoreS3FileID, f"{project_id}/{node_uuid}/{legacy_archive_name.name}"
+            ),
+            path_to_upload=legacy_archive_name,
+            io_log_redirect_cb=None,
+            progress_bar=progress_bar,
+            r_clone_settings=r_clone_settings,
+        )
+
+        # pylint: disable=protected-access
+        assert progress_bar._current_steps == pytest.approx(1)  # noqa: SLF001
+
+    assert (
+        await data_manager._state_metadata_entry_exists(  # noqa: SLF001
+            user_id=user_id,
+            project_id=project_id,
+            node_uuid=node_uuid,
+            path=content_path,
+            is_archive=True,
+        )
+        is True
+    )
+
+    await data_manager._delete_legacy_archive(  # noqa: SLF001
+        project_id=project_id,
+        node_uuid=node_uuid,
+        path=content_path,
+    )
+
+    assert (
+        await data_manager._state_metadata_entry_exists(  # noqa: SLF001
+            user_id=user_id,
+            project_id=project_id,
+            node_uuid=node_uuid,
+            path=content_path,
+            is_archive=True,
+        )
+        is False
+    )
diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py b/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py
index 255080e8c91..e82306ae2b3 100644
--- a/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py
+++ b/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py
@@ -11,8 +11,9 @@
 import tempfile
 import threading
 from asyncio import gather
+from collections.abc import Awaitable, Callable, Iterable
 from pathlib import Path
-from typing import Any, Awaitable, Callable, Iterable
+from typing import Any
 from uuid import uuid4
 
 import np_helpers
@@ -85,7 +86,7 @@ async def _check_port_valid(
     if key_name in port_values:
         if isinstance(port_values[key_name], dict):
             assert port.value
-            assert isinstance(port.value, (DownloadLink, PortLink, BaseFileLink))
+            assert isinstance(port.value, DownloadLink | PortLink | BaseFileLink)
             assert (
                 port.value.dict(by_alias=True, exclude_unset=True)
                 == port_values[key_name]
@@ -100,7 +101,7 @@ async def _check_port_valid(
 
 async def _check_ports_valid(ports: Nodeports, config_dict: dict, port_type: str):
     port_schemas = config_dict["schema"][port_type]
-    for key in port_schemas.keys():
+    for key in port_schemas:
         # test using "key" name
         await _check_port_valid(ports, config_dict, port_type, key, key)
         # test using index
@@ -254,12 +255,7 @@ async def test_port_value_accessors(
         ("data:*/*", __file__, Path, {"store": 0, "path": __file__}),
         ("data:text/*", __file__, Path, {"store": 0, "path": __file__}),
         ("data:text/py", __file__, Path, {"store": 0, "path": __file__}),
-        (
-            "data:text/py",
-            pytest.lazy_fixture("symlink_path"),
-            Path,
-            pytest.lazy_fixture("config_value_symlink_path"),
-        ),
+        ("data:text/py", 
"symlink_path", Path, "config_value_symlink_path"), ], ) async def test_port_file_accessors( @@ -274,7 +270,14 @@ async def test_port_file_accessors( node_uuid: NodeIDStr, e_tag: str, option_r_clone_settings: RCloneSettings | None, -): # pylint: disable=W0613, W0621 + request: pytest.FixtureRequest, +): + + if item_value == "symlink_path": + item_value = request.getfixturevalue("symlink_path") + if config_value == "config_value_symlink_path": + config_value = request.getfixturevalue("config_value_symlink_path") + config_value["path"] = f"{project_id}/{node_uuid}/{Path(config_value['path']).name}" config_dict, _project_id, _node_uuid = create_special_configuration( diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py index efea5184e1c..d4613b39d5f 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py @@ -122,12 +122,12 @@ def file_with_data() -> Iterator[Path]: @pytest.fixture( params=[ - pytest.lazy_fixture("symlink_to_file_with_data"), - pytest.lazy_fixture("file_with_data"), + "symlink_to_file_with_data", + "file_with_data", ] ) -def this_node_file(request) -> Iterator[Path]: - return request.param +def this_node_file(request: pytest.FixtureRequest) -> Path: + return request.getfixturevalue(request.param) @pytest.fixture diff --git a/services/agent/setup.py b/services/agent/setup.py index 69038620294..22c1b7ae1d7 100755 --- a/services/agent/setup.py +++ b/services/agent/setup.py @@ -38,30 +38,30 @@ def read_reqs(reqs_path: Path) -> set[str]: TEST_REQUIREMENTS = tuple(read_reqs(CURRENT_DIR / "requirements" / "_test.txt")) -SETUP = dict( - name=NAME, - version=VERSION, - author=AUTHORS, - description=DESCRIPTION, - long_description=README, - license="MIT license", - python_requires="~=3.10", - packages=find_packages(where="src"), - package_dir={ +SETUP = { + "name": NAME, + "version": VERSION, + "author": AUTHORS, + "description": DESCRIPTION, + "long_description": README, + "license": "MIT license", + "python_requires": "~=3.10", + "packages": find_packages(where="src"), + "package_dir": { "": "src", }, - include_package_data=True, - install_requires=PROD_REQUIREMENTS, - test_suite="tests", - tests_require=TEST_REQUIREMENTS, - extras_require={"test": TEST_REQUIREMENTS}, - entry_points={ + "include_package_data": True, + "install_requires": PROD_REQUIREMENTS, + "test_suite": "tests", + "tests_require": TEST_REQUIREMENTS, + "extras_require": {"test": TEST_REQUIREMENTS}, + "entry_points": { "console_scripts": [ "simcore-service-agent = simcore_service_agent.cli:main", "simcore-service = simcore_service_agent.cli:main", ], }, -) +} if __name__ == "__main__": setup(**SETUP) diff --git a/services/clusters-keeper/setup.py b/services/clusters-keeper/setup.py index ae5b36f9b7d..df644386545 100755 --- a/services/clusters-keeper/setup.py +++ b/services/clusters-keeper/setup.py @@ -39,31 +39,31 @@ def read_reqs(reqs_path: Path) -> set[str]: TEST_REQUIREMENTS = tuple(read_reqs(CURRENT_DIR / "requirements" / "_test.txt")) -SETUP = dict( - name=NAME, - version=VERSION, - author=AUTHORS, - description=DESCRIPTION, - long_description=README, - license="MIT license", - python_requires="~=3.10", - packages=find_packages(where="src"), - package_dir={ +SETUP = { + "name": NAME, + "version": VERSION, + "author": AUTHORS, + "description": DESCRIPTION, + "long_description": README, + "license": "MIT license", + "python_requires": "~=3.10", + 
"packages": find_packages(where="src"), + "package_dir": { "": "src", }, - package_data={"": ["data/*.yml"]}, - include_package_data=True, - install_requires=PROD_REQUIREMENTS, - test_suite="tests", - tests_require=TEST_REQUIREMENTS, - extras_require={"test": TEST_REQUIREMENTS}, - entry_points={ + "package_data": {"": ["data/*.yml"]}, + "include_package_data": True, + "install_requires": PROD_REQUIREMENTS, + "test_suite": "tests", + "tests_require": TEST_REQUIREMENTS, + "extras_require": {"test": TEST_REQUIREMENTS}, + "entry_points": { "console_scripts": [ "simcore-service-clusters-keeper = simcore_service_clusters_keeper.cli:main", "simcore-service = simcore_service_clusters_keeper.cli:main", ], }, -) +} if __name__ == "__main__": setup(**SETUP) diff --git a/services/dask-sidecar/setup.py b/services/dask-sidecar/setup.py index 864a2de4804..3299eab5a6c 100644 --- a/services/dask-sidecar/setup.py +++ b/services/dask-sidecar/setup.py @@ -23,35 +23,35 @@ def read_reqs(reqs_path: Path) -> set[str]: TEST_REQUIREMENTS = tuple(read_reqs(CURRENT_DIR / "requirements" / "_test.txt")) -SETUP = dict( - name="simcore-service-dask-sidecar", - version=(CURRENT_DIR / "VERSION").read_text().strip(), - author="Pedro Crespo-Valero (pcrespov)", - description="A dask-worker that runs as a sidecar", - classifiers=[ +SETUP = { + "name": "simcore-service-dask-sidecar", + "version": (CURRENT_DIR / "VERSION").read_text().strip(), + "author": "Pedro Crespo-Valero (pcrespov)", + "description": "A dask-worker that runs as a sidecar", + "classifiers": [ "Development Status :: 1 - Planning", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Programming Language :: Python :: 3.10", ], - long_description=(CURRENT_DIR / "README.md").read_text(), - license="MIT license", - python_requires="~=3.10", - packages=find_packages(where="src"), - package_dir={ + "long_description": (CURRENT_DIR / "README.md").read_text(), + "license": "MIT license", + "python_requires": "~=3.10", + "packages": find_packages(where="src"), + "package_dir": { "": "src", }, - install_requires=INSTALL_REQUIREMENTS, - test_suite="tests", - tests_require=TEST_REQUIREMENTS, - extras_require={"test": TEST_REQUIREMENTS}, - entry_points={ + "install_requires": INSTALL_REQUIREMENTS, + "test_suite": "tests", + "tests_require": TEST_REQUIREMENTS, + "extras_require": {"test": TEST_REQUIREMENTS}, + "entry_points": { "console_scripts": [ "simcore-service-dask-sidecar = simcore_service_dask_sidecar.cli:main", "simcore-service = simcore_service_dask_sidecar.cli:main", ], }, -) +} if __name__ == "__main__": diff --git a/services/datcore-adapter/setup.py b/services/datcore-adapter/setup.py index 264fc8e1add..c08158cf963 100644 --- a/services/datcore-adapter/setup.py +++ b/services/datcore-adapter/setup.py @@ -30,29 +30,29 @@ def read_reqs(reqs_path: Path) -> set[str]: TEST_REQUIREMENTS = tuple(read_reqs(CURRENT_DIR / "requirements" / "_test.txt")) -SETUP = dict( - name="simcore-service-datcore-adapter", - version=(CURRENT_DIR / "VERSION").read_text().strip(), - author="Sylvain Anderegg (sanderegg)", - description="Interfaces with datcore storage", - long_description=(CURRENT_DIR / "README.md").read_text(), - license="MIT license", - python_requires="~=3.10", - packages=find_packages(where="src"), - package_dir={ +SETUP = { + "name": "simcore-service-datcore-adapter", + "version": (CURRENT_DIR / "VERSION").read_text().strip(), + "author": "Sylvain Anderegg (sanderegg)", + "description": "Interfaces with datcore storage", + "long_description": 
(CURRENT_DIR / "README.md").read_text(), + "license": "MIT license", + "python_requires": "~=3.10", + "packages": find_packages(where="src"), + "package_dir": { "": "src", }, - install_requires=PROD_REQUIREMENTS, - test_suite="tests", - tests_require=TEST_REQUIREMENTS, - extras_require={"test": TEST_REQUIREMENTS}, - entry_points={ + "install_requires": PROD_REQUIREMENTS, + "test_suite": "tests", + "tests_require": TEST_REQUIREMENTS, + "extras_require": {"test": TEST_REQUIREMENTS}, + "entry_points": { "console_scripts": [ "simcore-service-datcore-adapter=simcore_service_datcore_adapter.cli:main", "simcore-service=simcore_service_datcore_adapter.cli:main", ], }, -) +} if __name__ == "__main__": diff --git a/services/director-v2/setup.py b/services/director-v2/setup.py index 92f9e8f1bfc..d1f9ffa19ec 100644 --- a/services/director-v2/setup.py +++ b/services/director-v2/setup.py @@ -35,34 +35,34 @@ def read_reqs(reqs_path: Path) -> set[str]: TEST_REQUIREMENTS = tuple(read_reqs(CURRENT_DIR / "requirements" / "_test.txt")) -SETUP = dict( - name="simcore-service-director-v2", - version=(CURRENT_DIR / "VERSION").read_text().strip(), - author=", ".join( +SETUP = { + "name": "simcore-service-director-v2", + "version": (CURRENT_DIR / "VERSION").read_text().strip(), + "author": ", ".join( ( "Pedro Crespo-Valero (pcrespov)", "Sylvain Anderegg (sanderegg)", ) ), - description="Orchestrates the pipeline of services defined by the user", - long_description=(CURRENT_DIR / "README.md").read_text(), - license="MIT license", - python_requires="~=3.10", - packages=find_packages(where="src"), - package_dir={ + "description": "Orchestrates the pipeline of services defined by the user", + "long_description": (CURRENT_DIR / "README.md").read_text(), + "license": "MIT license", + "python_requires": "~=3.10", + "packages": find_packages(where="src"), + "package_dir": { "": "src", }, - install_requires=PROD_REQUIREMENTS, - test_suite="tests", - tests_require=TEST_REQUIREMENTS, - extras_require={"test": TEST_REQUIREMENTS}, - entry_points={ + "install_requires": PROD_REQUIREMENTS, + "test_suite": "tests", + "tests_require": TEST_REQUIREMENTS, + "extras_require": {"test": TEST_REQUIREMENTS}, + "entry_points": { "console_scripts": [ "simcore-service-director-v2=simcore_service_director_v2.cli:main", "simcore-service=simcore_service_director_v2.cli:main", ], }, -) +} if __name__ == "__main__": diff --git a/services/dynamic-sidecar/setup.py b/services/dynamic-sidecar/setup.py index 6efe636e0ad..83a6b2abc06 100644 --- a/services/dynamic-sidecar/setup.py +++ b/services/dynamic-sidecar/setup.py @@ -34,32 +34,32 @@ def read_reqs(reqs_path: Path) -> set[str]: TEST_REQUIREMENTS = tuple(read_reqs(CURRENT_DIR / "requirements" / "_test.txt")) -SETUP = dict( - name="simcore-service-dynamic-sidecar", - version=(CURRENT_DIR / "VERSION").read_text().strip(), - author=", ".join( +SETUP = { + "name": "simcore-service-dynamic-sidecar", + "version": (CURRENT_DIR / "VERSION").read_text().strip(), + "author": ", ".join( ( "Andrei Neagu (GitHK)", "Sylvain Anderegg (sanderegg)", ) ), - description="Implements a sidecar service to manage user's dynamic/interactive services", - packages=find_packages(where="src"), - package_dir={ + "description": "Implements a sidecar service to manage user's dynamic/interactive services", + "packages": find_packages(where="src"), + "package_dir": { "": "src", }, - include_package_data=True, - python_requires="~=3.10", - PROD_REQUIREMENTS=PROD_REQUIREMENTS, - TEST_REQUIREMENTS=TEST_REQUIREMENTS, - 
setup_requires=["setuptools_scm"], - entry_points={ + "include_package_data": True, + "python_requires": "~=3.10", + "PROD_REQUIREMENTS": PROD_REQUIREMENTS, + "TEST_REQUIREMENTS": TEST_REQUIREMENTS, + "setup_requires": ["setuptools_scm"], + "entry_points": { "console_scripts": [ "simcore-service-dynamic-sidecar=simcore_service_dynamic_sidecar.cli:main", "simcore-service=simcore_service_dynamic_sidecar.cli:main", ], }, -) +} if __name__ == "__main__": diff --git a/services/web/server/setup.py b/services/web/server/setup.py index 6ec7537b4c3..aba3c322d87 100644 --- a/services/web/server/setup.py +++ b/services/web/server/setup.py @@ -31,40 +31,40 @@ def read_reqs(reqs_path: Path) -> set[str]: ) TEST_REQUIREMENTS = tuple(read_reqs(CURRENT_DIR / "requirements" / "_test.txt")) -SETUP = dict( - name="simcore-service-webserver", - version=Path(CURRENT_DIR / "VERSION").read_text().strip(), - description="Main service with an interface (http-API & websockets) to the web front-end", - author=", ".join( +SETUP = { + "name": "simcore-service-webserver", + "version": Path(CURRENT_DIR / "VERSION").read_text().strip(), + "description": "Main service with an interface (http-API & websockets) to the web front-end", + "author": ", ".join( ( "Pedro Crespo-Valero (pcrespov)", "Sylvain Anderegg (sanderegg)", "Andrei Neagu (GitHK)", ) ), - packages=find_packages(where="src"), - package_dir={ + "packages": find_packages(where="src"), + "package_dir": { "": "src", }, - include_package_data=True, - package_data={ + "include_package_data": True, + "package_data": { "": [ "api/v0/openapi.yaml", "api/v0/schemas/*.json", "templates/**/*.jinja2", ] }, - entry_points={ + "entry_points": { "console_scripts": [ "simcore-service-webserver=simcore_service_webserver.__main__:main", "simcore-service=simcore_service_webserver.__main__:main", ] }, - python_requires="~=3.10", - install_requires=INSTALL_REQUIREMENTS, - tests_require=TEST_REQUIREMENTS, - setup_requires=["pytest-runner"], -) + "python_requires": "~=3.10", + "install_requires": INSTALL_REQUIREMENTS, + "tests_require": TEST_REQUIREMENTS, + "setup_requires": ["pytest-runner"], +} if __name__ == "__main__": diff --git a/services/web/server/tests/integration/01/test_exporter_requests_handlers.py b/services/web/server/tests/integration/01/test_exporter_requests_handlers.py index 9bb111e4177..fb5d4d72cc5 100644 --- a/services/web/server/tests/integration/01/test_exporter_requests_handlers.py +++ b/services/web/server/tests/integration/01/test_exporter_requests_handlers.py @@ -234,6 +234,7 @@ async def test_export_project( dir_generated: Path, ): project_id = project["uuid"] + assert client.app url_export = client.app.router["export_project"].url_for(project_id=project_id) headers = {X_PRODUCT_NAME_HEADER: product_name} diff --git a/services/web/server/tests/integration/02/test_computation.py b/services/web/server/tests/integration/02/test_computation.py index f92e0e21a14..d1fe079e363 100644 --- a/services/web/server/tests/integration/02/test_computation.py +++ b/services/web/server/tests/integration/02/test_computation.py @@ -7,9 +7,10 @@ import asyncio import json import time +from collections.abc import Callable from copy import deepcopy from pathlib import Path -from typing import Any, Callable, NamedTuple +from typing import Any, NamedTuple import pytest import sqlalchemy as sa @@ -53,9 +54,6 @@ from tenacity.wait import wait_fixed from yarl import URL -API_PREFIX = "/" + API_VTAG - - # Selection of core and tool services started in this swarm fixture (integration) 
pytest_simcore_core_services_selection = [ "catalog", @@ -70,7 +68,10 @@ "storage", ] -pytest_simcore_ops_services_selection = ["minio", "adminer"] +pytest_simcore_ops_services_selection = [ + "minio", + "adminer", +] class _ExpectedResponseTuple(NamedTuple): @@ -491,8 +492,9 @@ async def test_run_pipeline_and_check_state( RunningState.ABORTED: 5, } - assert all( # pylint: disable=use-a-generator - [k in running_state_order_lookup for k in RunningState.__members__] + members = [k in running_state_order_lookup for k in RunningState.__members__] + assert all( + members ), "there are missing members in the order lookup, please complete!" pipeline_state = RunningState.UNKNOWN @@ -535,9 +537,8 @@ async def test_run_pipeline_and_check_state( comp_tasks_in_db: dict[NodeIdStr, Any] = _get_computational_tasks_from_db( project_id, postgres_db ) - assert all( # pylint: disable=use-a-generator - [t.state == StateType.SUCCESS for t in comp_tasks_in_db.values()] - ), ( + is_success = [t.state == StateType.SUCCESS for t in comp_tasks_in_db.values()] + assert all(is_success), ( "the individual computational services are not finished! " f"Expected to be completed, got {comp_tasks_in_db=}" )
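
The parametrization changes above in packages/simcore-sdk/tests/unit/test_node_ports_v2_port.py and packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py replace pytest.lazy_fixture with plain fixture-name strings that are resolved at run time via request.getfixturevalue(). A minimal, self-contained sketch of that pattern follows; the fixture bodies and the test here are illustrative only and are not code from this PR:

from pathlib import Path

import pytest


@pytest.fixture
def file_with_data(tmp_path: Path) -> Path:
    # simplified stand-in for the real file_with_data fixture in test_node_ports_v2_port.py
    path = tmp_path / "data.txt"
    path.write_text("some data")
    return path


@pytest.fixture
def symlink_to_file_with_data(tmp_path: Path, file_with_data: Path) -> Path:
    # simplified stand-in for the real symlink_to_file_with_data fixture
    link = tmp_path / "data-link.txt"
    link.symlink_to(file_with_data)
    return link


# parametrize with fixture *names* (plain strings) instead of pytest.lazy_fixture(...)
@pytest.fixture(params=["file_with_data", "symlink_to_file_with_data"])
def this_node_file(request: pytest.FixtureRequest) -> Path:
    # ...and resolve the named fixture lazily, only when the requesting test actually runs
    return request.getfixturevalue(request.param)


def test_file_exists(this_node_file: Path) -> None:
    assert this_node_file.exists()

For parametrized test arguments (rather than fixtures), test_port_file_accessors above applies the same trick with sentinel strings ("symlink_path", "config_value_symlink_path") that are swapped for the real fixture values inside the test body, which removes the dependency on the pytest-lazy-fixture plugin.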