diff --git a/.env-devel b/.env-devel index b23e780ad7e..4e5f7fdd197 100644 --- a/.env-devel +++ b/.env-devel @@ -117,7 +117,13 @@ WEBSERVER_PROMETHEUS_PORT=9090 WEBSERVER_RESOURCES_DELETION_TIMEOUT_SECONDS=900 WEBSERVER_SESSION_SECRET_KEY='REPLACE_ME_with_result__Fernet_generate_key=' WEBSERVER_STUDIES_ACCESS_ENABLED=0 -# for debugging + + +# For development ONLY --------------- +# +# AIODEBUG_SLOW_DURATION_SECS=0.25 +# API_SERVER_DEV_HTTP_CALLS_LOGS_PATH=captures.ignore.keep.log # PYTHONTRACEMALLOC=1 # PYTHONASYNCIODEBUG=1 -# AIODEBUG_SLOW_DURATION_SECS=0.25 +# +# ------------------------------------ diff --git a/packages/settings-library/src/settings_library/postgres.py b/packages/settings-library/src/settings_library/postgres.py index bcbcefa4345..209acf09789 100644 --- a/packages/settings-library/src/settings_library/postgres.py +++ b/packages/settings-library/src/settings_library/postgres.py @@ -1,6 +1,5 @@ import urllib.parse from functools import cached_property -from typing import Optional from pydantic import Field, PostgresDsn, SecretStr, conint, validator @@ -30,7 +29,7 @@ class PostgresSettings(BaseCustomSettings): 50, description="Maximum number of connections in the pool" ) - POSTGRES_CLIENT_NAME: Optional[str] = Field( + POSTGRES_CLIENT_NAME: str | None = Field( None, description="Name of the application connecting the postgres database, will default to use the host hostname (hostname on linux)", env=[ @@ -87,7 +86,7 @@ def dsn_with_query(self) -> str: class Config(BaseCustomSettings.Config): schema_extra = { "examples": [ - # minimal + # minimal required { "POSTGRES_HOST": "localhost", "POSTGRES_PORT": "5432", diff --git a/services/api-server/src/simcore_service_api_server/modules/__init__.py b/services/api-server/None similarity index 100% rename from services/api-server/src/simcore_service_api_server/modules/__init__.py rename to services/api-server/None diff --git a/services/api-server/src/simcore_service_api_server/_constants.py b/services/api-server/src/simcore_service_api_server/_constants.py new file mode 100644 index 00000000000..5f8975fc285 --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/_constants.py @@ -0,0 +1,5 @@ +from typing import Final + +MSG_BACKEND_SERVICE_UNAVAILABLE: Final[ + str +] = "backend service is disabled or unreachable" diff --git a/services/api-server/src/simcore_service_api_server/_meta.py b/services/api-server/src/simcore_service_api_server/_meta.py index 770c84cca8a..978cb59180c 100644 --- a/services/api-server/src/simcore_service_api_server/_meta.py +++ b/services/api-server/src/simcore_service_api_server/_meta.py @@ -1,30 +1,34 @@ """ Application's metadata """ -from contextlib import suppress -import pkg_resources -_current_distribution = pkg_resources.get_distribution("simcore_service_api_server") +from typing import Final -PROJECT_NAME: str = _current_distribution.project_name +from packaging.version import Version +from servicelib.utils_meta import PackageInfo -API_VERSION: str = _current_distribution.version -MAJOR, MINOR, PATCH = _current_distribution.version.split(".") -API_VTAG: str = f"v{MAJOR}" +info: Final = PackageInfo(package_name="simcore-service-api-server") +__version__: Final[str] = info.__version__ -__version__ = _current_distribution.version +PROJECT_NAME: Final[str] = info.project_name +VERSION: Final[Version] = info.version +API_VERSION: Final[str] = info.__version__ +API_VTAG: Final[str] = info.api_prefix_path_tag +SUMMARY: Final[str] = info.get_summary() -def get_summary() -> str: - with 
suppress(Exception): - try: - metadata = _current_distribution.get_metadata_lines("METADATA") - except FileNotFoundError: - metadata = _current_distribution.get_metadata_lines("PKG-INFO") - return next(x.split(":") for x in metadata if x.startswith("Summary:"))[-1] - return "" +# +# https://patorjk.com/software/taag/#p=display&f=JS%20Stick%20Letters&t=API-server%0A +# +APP_STARTED_BANNER_MSG = r""" + __ __ ___ __ ___ __ + /\ |__) | __ /__` |__ |__) \ / |__ |__) +/~~\ | | .__/ |___ | \ \/ |___ | \ {} +""".format( + f"v{__version__}" +) -SUMMARY: str = get_summary() +APP_FINISHED_BANNER_MSG = info.get_finished_banner() diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/webserver.py b/services/api-server/src/simcore_service_api_server/api/dependencies/webserver.py index 4ae6f764472..fae50ece697 100644 --- a/services/api-server/src/simcore_service_api_server/api/dependencies/webserver.py +++ b/services/api-server/src/simcore_service_api_server/api/dependencies/webserver.py @@ -5,26 +5,23 @@ from fastapi import Depends, FastAPI, HTTPException, status from fastapi.requests import Request -from ...core.settings import WebServerSettings -from ...modules.webserver import AuthSession +from ..._constants import MSG_BACKEND_SERVICE_UNAVAILABLE +from ...core.settings import ApplicationSettings, WebServerSettings +from ...plugins.webserver import AuthSession +from .application import get_app, get_settings from .authentication import get_active_user_email -UNAVAILBLE_MSG = "backend service is disabled or unreachable" - -def _get_app(request: Request) -> FastAPI: - return request.app - - -def _get_settings(request: Request) -> WebServerSettings: - s = request.app.state.settings.API_SERVER_WEBSERVER - if not s: +def _get_settings( + app_settings: ApplicationSettings = Depends(get_settings), +) -> WebServerSettings: + settings = app_settings.API_SERVER_WEBSERVER + if not settings: raise HTTPException( - status.HTTP_503_SERVICE_UNAVAILABLE, detail="webserver disabled" + status.HTTP_503_SERVICE_UNAVAILABLE, detail=MSG_BACKEND_SERVICE_UNAVAILABLE ) - - assert isinstance(s, WebServerSettings) # nosec - return s + assert isinstance(settings, WebServerSettings) # nosec + return settings def _get_encrypt(request: Request) -> Fernet | None: @@ -41,7 +38,9 @@ def get_session_cookie( # SEE services/web/server/tests/unit/with_dbs/test_login.py if fernet is None: - raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE, detail=UNAVAILBLE_MSG) + raise HTTPException( + status.HTTP_503_SERVICE_UNAVAILABLE, detail=MSG_BACKEND_SERVICE_UNAVAILABLE + ) # builds session cookie cookie_name = settings.WEBSERVER_SESSION_NAME @@ -59,7 +58,7 @@ def get_session_cookie( def get_webserver_session( - app: FastAPI = Depends(_get_app), + app: FastAPI = Depends(get_app), session_cookies: dict = Depends(get_session_cookie), ) -> AuthSession: """ diff --git a/services/api-server/src/simcore_service_api_server/api/routes/files.py b/services/api-server/src/simcore_service_api_server/api/routes/files.py index 528842978b1..87e00b88c5d 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/files.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/files.py @@ -22,7 +22,7 @@ from ..._meta import API_VTAG from ...models.schemas.files import File -from ...modules.storage import StorageApi, StorageFileMetaData, to_file_api_model +from ...plugins.storage import StorageApi, StorageFileMetaData, to_file_api_model from ..dependencies.authentication import get_current_user_id from 
..dependencies.services import get_api_client diff --git a/services/api-server/src/simcore_service_api_server/api/routes/health.py b/services/api-server/src/simcore_service_api_server/api/routes/health.py index a87dc797fe9..93ed49c758b 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/health.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/health.py @@ -7,10 +7,10 @@ from models_library.app_diagnostics import AppStatusCheck from ..._meta import API_VERSION, PROJECT_NAME -from ...modules.catalog import CatalogApi -from ...modules.director_v2 import DirectorV2Api -from ...modules.storage import StorageApi -from ...modules.webserver import WebserverApi +from ...plugins.catalog import CatalogApi +from ...plugins.director_v2 import DirectorV2Api +from ...plugins.storage import StorageApi +from ...plugins.webserver import WebserverApi from ..dependencies.application import get_reverse_url_mapper from ..dependencies.services import get_api_client diff --git a/services/api-server/src/simcore_service_api_server/api/routes/solvers.py b/services/api-server/src/simcore_service_api_server/api/routes/solvers.py index 9c5f5c767cd..8e64b87810c 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/solvers.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/solvers.py @@ -11,12 +11,12 @@ from ...core.settings import BasicSettings from ...models.basic_types import VersionStr from ...models.schemas.solvers import Solver, SolverKeyId, SolverPort -from ...modules.catalog import CatalogApi +from ...plugins.catalog import CatalogApi from ..dependencies.application import get_product_name, get_reverse_url_mapper from ..dependencies.authentication import get_current_user_id from ..dependencies.services import get_api_client -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) router = APIRouter() settings = BasicSettings.create_from_envs() @@ -193,7 +193,7 @@ async def list_solver_ports( except ValidationError as err: error_code = create_error_code(err) - logger.exception( + _logger.exception( "Corrupted port data for service %s [%s]", f"{solver_key}:{version}", f"{error_code}", diff --git a/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py b/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py index 2811c9a2d75..5edcb232803 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py @@ -18,9 +18,9 @@ from ...models.schemas.files import File from ...models.schemas.jobs import ArgumentType, Job, JobInputs, JobOutputs, JobStatus from ...models.schemas.solvers import Solver, SolverKeyId -from ...modules.catalog import CatalogApi -from ...modules.director_v2 import DirectorV2Api, DownloadLink, NodeName -from ...modules.storage import StorageApi, to_file_api_model +from ...plugins.catalog import CatalogApi +from ...plugins.director_v2 import DirectorV2Api, DownloadLink, NodeName +from ...plugins.storage import StorageApi, to_file_api_model from ...utils.solver_job_models_converters import ( create_job_from_project, create_jobstatus_from_task, @@ -33,7 +33,7 @@ from ..dependencies.services import get_api_client from ..dependencies.webserver import AuthSession, get_webserver_session -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) router = APIRouter() @@ -73,7 +73,7 @@ async def list_jobs( version=version, 
product_name=product_name, ) - logger.debug("Listing Jobs in Solver '%s'", solver.name) + _logger.debug("Listing Jobs in Solver '%s'", solver.name) projects: list[Project] = await webserver_api.list_projects(solver.name) jobs: deque[Job] = deque() @@ -116,7 +116,7 @@ async def create_job( # creates NEW job as prototype pre_job = Job.create_solver_job(solver=solver, inputs=inputs) - logger.debug("Creating Job '%s'", pre_job.name) + _logger.debug("Creating Job '%s'", pre_job.name) # -> catalog # TODO: validate inputs against solver input schema @@ -154,7 +154,7 @@ async def get_job( """Gets job of a given solver""" job_name = _compose_job_resource_name(solver_key, version, job_id) - logger.debug("Getting Job '%s'", job_name) + _logger.debug("Getting Job '%s'", job_name) project: Project = await webserver_api.get_project(project_id=job_id) @@ -182,7 +182,7 @@ async def start_job( """ job_name = _compose_job_resource_name(solver_key, version, job_id) - logger.debug("Start Job '%s'", job_name) + _logger.debug("Start Job '%s'", job_name) task = await director2_api.start_computation( project_id=job_id, @@ -205,7 +205,7 @@ async def stop_job( director2_api: DirectorV2Api = Depends(get_api_client(DirectorV2Api)), ): job_name = _compose_job_resource_name(solver_key, version, job_id) - logger.debug("Stopping Job '%s'", job_name) + _logger.debug("Stopping Job '%s'", job_name) await director2_api.stop_computation(job_id, user_id) @@ -226,7 +226,7 @@ async def inspect_job( director2_api: DirectorV2Api = Depends(get_api_client(DirectorV2Api)), ): job_name = _compose_job_resource_name(solver_key, version, job_id) - logger.debug("Inspecting Job '%s'", job_name) + _logger.debug("Inspecting Job '%s'", job_name) task = await director2_api.get_computation(job_id, user_id) job_status: JobStatus = create_jobstatus_from_task(task) @@ -247,7 +247,7 @@ async def get_job_outputs( storage_client: StorageApi = Depends(get_api_client(StorageApi)), ): job_name = _compose_job_resource_name(solver_key, version, job_id) - logger.debug("Get Job '%s' outputs", job_name) + _logger.debug("Get Job '%s' outputs", job_name) project: Project = await webserver_api.get_project(project_id=job_id) node_ids = list(project.workbench.keys()) @@ -328,7 +328,7 @@ async def get_job_output_logfile( ), "Current version only supports one node per solver" for presigned_download_link in logs_urls.values(): - logger.info( + _logger.info( "Redirecting '%s' to %s ...", f"{solver_key}/releases/{version}/jobs/{job_id}/outputs/logfile", presigned_download_link, diff --git a/services/api-server/src/simcore_service_api_server/api/routes/studies.py b/services/api-server/src/simcore_service_api_server/api/routes/studies.py index 516fa0b6eff..d6cc0329c76 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/studies.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/studies.py @@ -5,10 +5,10 @@ from ...core.settings import BasicSettings from ...models.schemas.studies import StudyID, StudyPort -from ...modules.webserver import AuthSession +from ...plugins.webserver import AuthSession from ..dependencies.webserver import get_webserver_session -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) router = APIRouter() settings = BasicSettings.create_from_envs() diff --git a/services/api-server/src/simcore_service_api_server/api/routes/users.py b/services/api-server/src/simcore_service_api_server/api/routes/users.py index ee025777069..9d2fedf6f15 100644 --- 
a/services/api-server/src/simcore_service_api_server/api/routes/users.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/users.py @@ -3,10 +3,10 @@ from fastapi import APIRouter, Depends, Security from ...models.schemas.profiles import Profile, ProfileUpdate -from ...modules.webserver import AuthSession +from ...plugins.webserver import AuthSession from ..dependencies.webserver import get_webserver_session -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) router = APIRouter() diff --git a/services/api-server/src/simcore_service_api_server/core/application.py b/services/api-server/src/simcore_service_api_server/core/application.py index 9d7cd68dc6b..58b19d18655 100644 --- a/services/api-server/src/simcore_service_api_server/core/application.py +++ b/services/api-server/src/simcore_service_api_server/core/application.py @@ -18,13 +18,13 @@ from ..api.errors.validation_error import http422_error_handler from ..api.root import create_router from ..api.routes.health import router as health_router -from ..modules import catalog, director_v2, remote_debug, storage, webserver +from ..plugins import catalog, director_v2, remote_debug, storage, webserver from .events import create_start_app_handler, create_stop_app_handler from .openapi import override_openapi_method, use_route_names_as_operation_ids from .redoc import create_redoc_handler from .settings import ApplicationSettings -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) def init_app(settings: ApplicationSettings | None = None) -> FastAPI: @@ -32,10 +32,10 @@ def init_app(settings: ApplicationSettings | None = None) -> FastAPI: settings = ApplicationSettings.create_from_envs() assert settings # nosec - logging.basicConfig(level=settings.LOG_LEVEL.value) - logging.root.setLevel(settings.LOG_LEVEL.value) + logging.basicConfig(level=settings.log_level) + logging.root.setLevel(settings.log_level) config_all_loggers(settings.API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED) - logger.debug("App settings:\n%s", settings.json(indent=2)) + _logger.debug("App settings:\n%s", settings.json(indent=2)) # creates app instance app = FastAPI( diff --git a/services/api-server/src/simcore_service_api_server/core/events.py b/services/api-server/src/simcore_service_api_server/core/events.py index 44305123e3a..3e7b2403439 100644 --- a/services/api-server/src/simcore_service_api_server/core/events.py +++ b/services/api-server/src/simcore_service_api_server/core/events.py @@ -3,40 +3,28 @@ from fastapi import FastAPI -from .._meta import PROJECT_NAME, __version__ +from .._meta import APP_FINISHED_BANNER_MSG, APP_STARTED_BANNER_MSG from ..db.events import close_db_connection, connect_to_db logger = logging.getLogger(__name__) -# -# https://patorjk.com/software/taag/#p=display&f=JS%20Stick%20Letters&t=API-server%0A -# -WELCOME_MSG = r""" - __ __ ___ __ ___ __ - /\ |__) | __ /__` |__ |__) \ / |__ |__) -/~~\ | | .__/ |___ | \ \/ |___ | \ {} - -""".format( - f"v{__version__}" -) - def create_start_app_handler(app: FastAPI) -> Callable: - async def on_startup() -> None: - logger.info("Application starting") + async def _on_startup() -> None: + logger.info("Application starting ...") if app.state.settings.API_SERVER_POSTGRES: # database await connect_to_db(app) assert app.state.engine # nosec - print(WELCOME_MSG, flush=True) + print(APP_STARTED_BANNER_MSG, flush=True) - return on_startup + return _on_startup def create_stop_app_handler(app: FastAPI) -> Callable: - async def on_shutdown() -> None: - 
logger.info("Application stopping") + async def _on_shutdown() -> None: + logger.info("Application stopping, ...") if app.state.settings.API_SERVER_POSTGRES: try: @@ -50,7 +38,6 @@ async def on_shutdown() -> None: stack_info=app.state.settings.debug, ) - msg = PROJECT_NAME + f" v{__version__} SHUT DOWN" - print(f"{msg:=^100}") + print(APP_FINISHED_BANNER_MSG, flush=True) - return on_shutdown + return _on_shutdown diff --git a/services/api-server/src/simcore_service_api_server/core/settings.py b/services/api-server/src/simcore_service_api_server/core/settings.py index aeb51ac4f71..779d6ed80ec 100644 --- a/services/api-server/src/simcore_service_api_server/core/settings.py +++ b/services/api-server/src/simcore_service_api_server/core/settings.py @@ -1,34 +1,29 @@ from functools import cached_property +from pathlib import Path from models_library.basic_types import BootModeEnum, LogLevel -from pydantic import AnyHttpUrl, Field, SecretStr +from pydantic import Field, SecretStr from pydantic.class_validators import validator from settings_library.base import BaseCustomSettings +from settings_library.basic_types import PortInt, VersionTag from settings_library.catalog import CatalogSettings from settings_library.postgres import PostgresSettings +from settings_library.storage import StorageSettings from settings_library.tracing import TracingSettings from settings_library.utils_logging import MixinLoggingSettings +from settings_library.utils_service import ( + DEFAULT_AIOHTTP_PORT, + DEFAULT_FASTAPI_PORT, + MixinServiceSettings, + URLPart, +) from settings_library.utils_session import MixinSessionSettings -# SERVICES CLIENTS -------------------------------------------- - -class _UrlMixin: - def _build_url(self, prefix: str) -> str: - prefix = prefix.upper() - url: str = AnyHttpUrl.build( - scheme="http", - host=getattr(self, f"{prefix}_HOST"), - port=f"{getattr(self, f'{prefix}_PORT')}", - path=f"/{getattr(self, f'{prefix}_VTAG')}", # NOTE: it ends with /{VTAG} - ) - return url - - -class WebServerSettings(BaseCustomSettings, _UrlMixin, MixinSessionSettings): +class WebServerSettings(BaseCustomSettings, MixinServiceSettings, MixinSessionSettings): WEBSERVER_HOST: str = "webserver" - WEBSERVER_PORT: int = 8080 - WEBSERVER_VTAG: str = "v0" + WEBSERVER_PORT: PortInt = DEFAULT_AIOHTTP_PORT + WEBSERVER_VTAG: VersionTag = Field(default="v0") WEBSERVER_SESSION_SECRET_KEY: SecretStr = Field( ..., @@ -40,8 +35,14 @@ class WebServerSettings(BaseCustomSettings, _UrlMixin, MixinSessionSettings): WEBSERVER_SESSION_NAME: str = "osparc.WEBAPI_SESSION" @cached_property - def base_url(self) -> str: - return self._build_url("WEBSERVER") + def api_base_url(self) -> str: + # http://webserver:8080/v0 + url_with_vtag: str = self._compose_url( + prefix="WEBSERVER", + port=URLPart.REQUIRED, + vtag=URLPart.REQUIRED, + ) + return url_with_vtag @validator("WEBSERVER_SESSION_SECRET_KEY") @classmethod @@ -49,24 +50,30 @@ def check_valid_fernet_key(cls, v): return cls.do_check_valid_fernet_key(v) -class StorageSettings(BaseCustomSettings, _UrlMixin): - STORAGE_HOST: str = "storage" - STORAGE_PORT: int = 8080 - STORAGE_VTAG: str = "v0" +class DirectorV2Settings(BaseCustomSettings, MixinServiceSettings): + DIRECTOR_V2_HOST: str = "director-v2" + DIRECTOR_V2_PORT: PortInt = DEFAULT_FASTAPI_PORT + DIRECTOR_V2_VTAG: VersionTag = "v2" @cached_property - def base_url(self) -> str: - return self._build_url("STORAGE") - - -class DirectorV2Settings(BaseCustomSettings, _UrlMixin): - DIRECTOR_V2_HOST: str = "director-v2" - 
DIRECTOR_V2_PORT: int = 8000 - DIRECTOR_V2_VTAG: str = "v2" + def api_base_url(self) -> str: + # http://director-v2:8000/v2 + url_with_vtag: str = self._compose_url( + prefix="DIRECTOR_V2", + port=URLPart.REQUIRED, + vtag=URLPart.REQUIRED, + ) + return url_with_vtag @cached_property def base_url(self) -> str: - return self._build_url("DIRECTOR_V2") + # http://director-v2:8000 + origin: str = self._compose_url( + prefix="DIRECTOR_V2", + port=URLPart.REQUIRED, + vtag=URLPart.EXCLUDE, + ) + return origin # MAIN SETTINGS -------------------------------------------- @@ -85,7 +92,7 @@ class BasicSettings(BaseCustomSettings, MixinLoggingSettings): env=["API_SERVER_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"], ) API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( - False, + default=False, env=["API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED"], description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", ) @@ -119,11 +126,39 @@ class ApplicationSettings(BasicSettings): # DIAGNOSTICS API_SERVER_TRACING: TracingSettings | None = Field(auto_default_from_env=True) + # DEV-TOOLS + API_SERVER_DEV_HTTP_CALLS_LOGS_PATH: Path | None = Field( + default=None, + description="If set, it activates http calls capture mechanism used to generate mock data" + "Path to store captured client calls." + "TIP: use 'API_SERVER_DEV_HTTP_CALLS_LOGS_PATH=captures.ignore.keep.log'" + "NOTE: only available in devel mode", + ) + @cached_property def debug(self) -> bool: """If True, debug tracebacks should be returned on errors.""" - return self.SC_BOOT_MODE in [ - BootModeEnum.DEBUG, - BootModeEnum.DEVELOPMENT, - BootModeEnum.LOCAL, - ] + return self.SC_BOOT_MODE is not None and self.SC_BOOT_MODE.is_devel_mode() + + @validator("API_SERVER_DEV_HTTP_CALLS_LOGS_PATH") + @classmethod + def _enable_only_in_devel_mode(cls, v, values): + if v and not ( + values + and (boot_mode := values.get("SC_BOOT_MODE")) + and boot_mode.is_devel_mode() + ): + raise ValueError( + "API_SERVER_DEV_HTTP_CALLS_LOGS_PATH only allowed in devel mode" + ) + return v + + +__all__: tuple[str, ...] = ( + "ApplicationSettings", + "BasicSettings", + "CatalogSettings", + "DirectorV2Settings", + "StorageSettings", + "WebServerSettings", +) diff --git a/services/api-server/src/simcore_service_api_server/plugins/__init__.py b/services/api-server/src/simcore_service_api_server/plugins/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/api-server/src/simcore_service_api_server/modules/catalog.py b/services/api-server/src/simcore_service_api_server/plugins/catalog.py similarity index 87% rename from services/api-server/src/simcore_service_api_server/modules/catalog.py rename to services/api-server/src/simcore_service_api_server/plugins/catalog.py index 899f6145651..4e4ebc69e5a 100644 --- a/services/api-server/src/simcore_service_api_server/modules/catalog.py +++ b/services/api-server/src/simcore_service_api_server/plugins/catalog.py @@ -14,9 +14,7 @@ from ..models.schemas.solvers import LATEST_VERSION, Solver, SolverKeyId, SolverPort from ..utils.client_base import BaseServiceClientApi, setup_client_instance -## from ..utils.client_decorators import JSON, handle_errors, handle_retry - -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) SolverNameVersionPair = tuple[SolverKeyId, str] @@ -62,12 +60,6 @@ def to_solver(self) -> Solver: # # - Error handling: What do we reraise, suppress, transform??? 
# -# -# TODO: handlers should not capture outputs -# @handle_errors("catalog", logger, return_json=True) -# @handle_retry(logger) -# async def get(self, path: str, *args, **kwargs) -> JSON: -# return await self.client.get(path, *args, **kwargs) @dataclass @@ -75,6 +67,8 @@ class CatalogApi(BaseServiceClientApi): """ This class acts a proxy of the catalog service It abstracts request to the catalog API service + + SEE osparc-simcore/services/catalog/openapi.json """ async def list_solvers( @@ -84,16 +78,15 @@ async def list_solvers( product_name: str, predicate: Callable[[Solver], bool] | None = None, ) -> list[Solver]: - resp = await self.client.get( + response = await self.client.get( "/services", params={"user_id": user_id, "details": True}, headers={"x-simcore-products-name": product_name}, ) - resp.raise_for_status() + response.raise_for_status() - # TODO: move this sorting down to catalog service? solvers = [] - for data in resp.json(): + for data in response.json(): try: service = TruncatedCatalogServiceOut.parse_obj(data) if service.service_type == ServiceType.COMPUTATIONAL: @@ -105,7 +98,7 @@ async def list_solvers( # NOTE: For the moment, this is necessary because there are no guarantees # at the image registry. Therefore we exclude and warn # invalid items instead of returning error - logger.warning( + _logger.warning( "Skipping invalid service returned by catalog '%s': %s", data, err, @@ -121,14 +114,14 @@ async def get_solver( service_key = urllib.parse.quote_plus(name) service_version = version - resp = await self.client.get( + response = await self.client.get( f"/services/{service_key}/{service_version}", params={"user_id": user_id}, headers={"x-simcore-products-name": product_name}, ) - resp.raise_for_status() + response.raise_for_status() - service = TruncatedCatalogServiceOut.parse_obj(resp.json()) + service = TruncatedCatalogServiceOut.parse_obj(response.json()) assert ( # nosec service.service_type == ServiceType.COMPUTATIONAL ), "Expected by SolverName regex" @@ -145,14 +138,15 @@ async def get_solver_ports( service_key = urllib.parse.quote_plus(name) service_version = version - resp = await self.client.get( + response = await self.client.get( f"/services/{service_key}/{service_version}/ports", params={"user_id": user_id}, headers={"x-simcore-products-name": product_name}, ) - resp.raise_for_status() - solver_ports = parse_obj_as(list[SolverPort], resp.json()) + response.raise_for_status() + + solver_ports = parse_obj_as(list[SolverPort], response.json()) return solver_ports async def list_latest_releases( diff --git a/services/api-server/src/simcore_service_api_server/modules/director_v2.py b/services/api-server/src/simcore_service_api_server/plugins/director_v2.py similarity index 76% rename from services/api-server/src/simcore_service_api_server/modules/director_v2.py rename to services/api-server/src/simcore_service_api_server/plugins/director_v2.py index daea1e166ed..1beb3146f03 100644 --- a/services/api-server/src/simcore_service_api_server/modules/director_v2.py +++ b/services/api-server/src/simcore_service_api_server/plugins/director_v2.py @@ -21,7 +21,6 @@ # API MODELS --------------------------------------------- # NOTE: as services/director-v2/src/simcore_service_director_v2/models/schemas/comp_tasks.py -# TODO: shall schemas of internal APIs be in models_library as well?? 
or is against class ComputationTaskGet(ComputationTask): @@ -34,7 +33,6 @@ class ComputationTaskGet(ComputationTask): def guess_progress(self) -> PercentageInt: # guess progress based on self.state - # FIXME: incomplete! if self.state in [RunningState.SUCCESS, RunningState.FAILED]: return PercentageInt(100) return PercentageInt(0) @@ -54,7 +52,7 @@ class TaskLogFileGet(BaseModel): @contextmanager -def handle_errors_context(project_id: UUID): +def _handle_errors_context(project_id: UUID): try: yield @@ -88,26 +86,13 @@ def handle_errors_context(project_id: UUID): class DirectorV2Api(BaseServiceClientApi): - # NOTE: keep here tmp as reference - # @handle_errors("director", logger, return_json=True) - # @handle_retry(logger) - # async def get(self, path: str, *args, **kwargs) -> JSON: - # return await self.client.get(path, *args, **kwargs) - - # director2 API --------------------------- - # TODO: error handling - # - # HTTPStatusError: 404 Not Found - # ValidationError - # ServiceUnabalabe: 503 - async def create_computation( self, project_id: UUID, user_id: PositiveInt, product_name: str, ) -> ComputationTaskGet: - resp = await self.client.post( + response = await self.client.post( "/v2/computations", json={ "user_id": user_id, @@ -116,9 +101,8 @@ async def create_computation( "product_name": product_name, }, ) - - resp.raise_for_status() - computation_task = ComputationTaskGet(**resp.json()) + response.raise_for_status() + computation_task = ComputationTaskGet(**response.json()) return computation_task async def start_computation( @@ -128,12 +112,12 @@ async def start_computation( product_name: str, cluster_id: ClusterID | None = None, ) -> ComputationTaskGet: - with handle_errors_context(project_id): + with _handle_errors_context(project_id): extras = {} if cluster_id is not None: extras["cluster_id"] = cluster_id - resp = await self.client.post( + response = await self.client.post( "/v2/computations", json={ "user_id": user_id, @@ -143,34 +127,34 @@ async def start_computation( **extras, }, ) - resp.raise_for_status() - computation_task = ComputationTaskGet(**resp.json()) + response.raise_for_status() + computation_task = ComputationTaskGet(**response.json()) return computation_task async def get_computation( self, project_id: UUID, user_id: PositiveInt ) -> ComputationTaskGet: - resp = await self.client.get( + response = await self.client.get( f"/v2/computations/{project_id}", params={ "user_id": user_id, }, ) - resp.raise_for_status() - computation_task = ComputationTaskGet(**resp.json()) + response.raise_for_status() + computation_task = ComputationTaskGet(**response.json()) return computation_task async def stop_computation( self, project_id: UUID, user_id: PositiveInt ) -> ComputationTaskGet: - data = await self.client.post( + response = await self.client.post( f"/v2/computations/{project_id}:stop", json={ "user_id": user_id, }, ) - computation_task = ComputationTaskGet(**data.json()) + computation_task = ComputationTaskGet(**response.json()) return computation_task async def delete_computation(self, project_id: UUID, user_id: PositiveInt): @@ -186,27 +170,22 @@ async def delete_computation(self, project_id: UUID, user_id: PositiveInt): async def get_computation_logs( self, user_id: PositiveInt, project_id: UUID ) -> dict[NodeName, DownloadLink]: - resp = await self.client.get( + response = await self.client.get( f"/v2/computations/{project_id}/tasks/-/logfile", params={ "user_id": user_id, }, ) - # probably not found - resp.raise_for_status() - payload = 
parse_raw_as(list[TaskLogFileGet], resp.text or "[]") - return {r.task_id: r.download_link for r in payload} - # TODO: HIGHER lever interface with job* resources - # or better in another place? - async def create_job(self): - pass + # probably not found + response.raise_for_status() - async def list_jobs(self): - pass + node_to_links: dict[NodeName, DownloadLink] = {} + for r in parse_raw_as(list[TaskLogFileGet], response.text or "[]"): + if r.download_link: + node_to_links[f"{r.task_id}"] = r.download_link - async def get_job(self): - pass + return node_to_links # MODULES APP SETUP ------------------------------------------------------------- @@ -220,6 +199,6 @@ def setup(app: FastAPI, settings: DirectorV2Settings) -> None: app, DirectorV2Api, # WARNING: it has /v0 and /v2 prefixes - api_baseurl=f"http://{settings.DIRECTOR_V2_HOST}:{settings.DIRECTOR_V2_PORT}", + api_baseurl=settings.base_url, service_name="director_v2", ) diff --git a/services/api-server/src/simcore_service_api_server/modules/remote_debug.py b/services/api-server/src/simcore_service_api_server/plugins/remote_debug.py similarity index 100% rename from services/api-server/src/simcore_service_api_server/modules/remote_debug.py rename to services/api-server/src/simcore_service_api_server/plugins/remote_debug.py diff --git a/services/api-server/src/simcore_service_api_server/modules/storage.py b/services/api-server/src/simcore_service_api_server/plugins/storage.py similarity index 77% rename from services/api-server/src/simcore_service_api_server/modules/storage.py rename to services/api-server/src/simcore_service_api_server/plugins/storage.py index da74392a073..5639b2e3c68 100644 --- a/services/api-server/src/simcore_service_api_server/modules/storage.py +++ b/services/api-server/src/simcore_service_api_server/plugins/storage.py @@ -15,7 +15,7 @@ from ..models.schemas.files import File from ..utils.client_base import BaseServiceClientApi, setup_client_instance -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) _FILE_ID_PATTERN = re.compile(r"^api\/(?P[\w-]+)\/(?P.+)$") @@ -32,9 +32,6 @@ def to_file_api_model(stored_file_meta: StorageFileMetaData) -> File: meta = File( id=file_id, # type: ignore filename=filename, - # FIXME: UploadFile gets content from the request header while here is - # mimetypes.guess_type used. Sometimes it does not match. - # Add column in meta_data table of storage and stop guessing :-) content_type=guess_type(filename)[0] or "application/octet-stream", checksum=stored_file_meta.entity_tag, ) @@ -47,25 +44,18 @@ class StorageApi(BaseServiceClientApi): # SIMCORE_S3_ID = 0 - # FIXME: error handling and retrying policies? 
- # @handle_errors("storage", logger, return_json=True) - # @handle_retry(logger) - # async def get(self, path: str, *args, **kwargs) -> JSON: - # return await self.client.get(path, *args, **kwargs) - async def list_files(self, user_id: int) -> list[StorageFileMetaData]: """Lists metadata of all s3 objects name as api/* from a given user""" - resp = await self.client.post( + response = await self.client.post( "/simcore-s3/files/metadata:search", params={ "user_id": str(user_id), "startswith": "api/", }, ) - # FIXME: handle HTTPStatusError - resp.raise_for_status() + response.raise_for_status() - files_metadata = FileMetaDataArray(__root__=resp.json()["data"] or []) + files_metadata = FileMetaDataArray(__root__=response.json()["data"] or []) files: list[StorageFileMetaData] = files_metadata.__root__ return files @@ -75,14 +65,15 @@ async def search_files( # NOTE: can NOT use /locations/0/files/metadata with uuid_filter=api/ because # logic in storage 'wrongly' assumes that all data is associated to a project and # here there is no project, so it would always returns an empty - resp = await self.client.post( + response = await self.client.post( "/simcore-s3/files/metadata:search", params={ "user_id": str(user_id), "startswith": f"api/{file_id}", }, ) - files_metadata = FileMetaDataArray(__root__=resp.json()["data"] or []) + + files_metadata = FileMetaDataArray(__root__=response.json()["data"] or []) files: list[StorageFileMetaData] = files_metadata.__root__ return files @@ -91,12 +82,12 @@ async def get_download_link( ) -> AnyUrl: object_path = urllib.parse.quote_plus(f"api/{file_id}/{file_name}") - resp = await self.client.get( + response = await self.client.get( f"/locations/{self.SIMCORE_S3_ID}/files/{object_path}", params={"user_id": str(user_id)}, ) - presigned_link: PresignedLink = PresignedLink.parse_obj(resp.json()["data"]) + presigned_link: PresignedLink = PresignedLink.parse_obj(response.json()["data"]) link: AnyUrl = presigned_link.link return link @@ -105,11 +96,12 @@ async def get_upload_links( ) -> FileUploadSchema: object_path = urllib.parse.quote_plus(f"api/{file_id}/{file_name}") - resp = await self.client.put( + response = await self.client.put( f"/locations/{self.SIMCORE_S3_ID}/files/{object_path}", params={"user_id": user_id, "file_size": 0}, ) - enveloped_data = Envelope[FileUploadSchema].parse_obj(resp.json()) + + enveloped_data = Envelope[FileUploadSchema].parse_obj(response.json()) assert enveloped_data.data # nosec return enveloped_data.data @@ -127,16 +119,14 @@ async def create_soft_link( # ln makes links between files # ln TARGET LINK_NAME - resp = await self.client.post( + response = await self.client.post( f"/files/{file_id}:soft-copy", params={"user_id": user_id}, json={"link_id": link_path}, ) - # FIXME: handle errors properly - resp.raise_for_status() + response.raise_for_status() - # FIXME: was hanging when resp.join()["data"] -> None - stored_file_meta = StorageFileMetaData.parse_obj(resp.json()["data"]) + stored_file_meta = StorageFileMetaData.parse_obj(response.json()["data"]) file_meta: File = to_file_api_model(stored_file_meta) return file_meta @@ -149,7 +139,7 @@ def setup(app: FastAPI, settings: StorageSettings) -> None: settings = StorageSettings() setup_client_instance( - app, StorageApi, api_baseurl=settings.base_url, service_name="storage" + app, StorageApi, api_baseurl=settings.api_base_url, service_name="storage" ) diff --git a/services/api-server/src/simcore_service_api_server/modules/webserver.py 
b/services/api-server/src/simcore_service_api_server/plugins/webserver.py similarity index 81% rename from services/api-server/src/simcore_service_api_server/modules/webserver.py rename to services/api-server/src/simcore_service_api_server/plugins/webserver.py index b7f78220093..98981fb9dd4 100644 --- a/services/api-server/src/simcore_service_api_server/modules/webserver.py +++ b/services/api-server/src/simcore_service_api_server/plugins/webserver.py @@ -1,4 +1,3 @@ -import base64 import json import logging from collections import deque @@ -26,7 +25,7 @@ from ..models.types import JSON, ListAnyDict from ..utils.client_base import BaseServiceClientApi, setup_client_instance -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) @dataclass @@ -64,14 +63,14 @@ def _postprocess(cls, resp: Response) -> JSON | None: body = resp.json() data, error = body.get("data"), body.get("error") except json.JSONDecodeError: - logger.warning( + _logger.warning( "Failed to unenvelop webserver response %s", f"{resp.text=}", exc_info=True, ) if resp.is_server_error: - logger.error( + _logger.error( "webserver error %s [%s]: %s", f"{resp.status_code=}", f"{resp.reason_phrase=}", @@ -86,17 +85,13 @@ def _postprocess(cls, resp: Response) -> JSON | None: return data # OPERATIONS - # TODO: refactor and code below - # TODO: policy to retry if NetworkError/timeout? - # TODO: add ping to healthcheck async def get(self, path: str) -> JSON | None: url = path.lstrip("/") try: resp = await self.client.get(url, cookies=self.session_cookies) except Exception as err: - # FIXME: error handling - logger.exception("Failed to get %s", url) + _logger.exception("Failed to get %s", url) raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE) from err return self._postprocess(resp) @@ -106,13 +101,12 @@ async def put(self, path: str, body: dict) -> JSON | None: try: resp = await self.client.put(url, json=body, cookies=self.session_cookies) except Exception as err: - logger.exception("Failed to put %s", url) + _logger.exception("Failed to put %s", url) raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE) from err return self._postprocess(resp) # PROJECTS resource --- - # TODO: error handling! async def create_project(self, project: NewProjectIn): # POST /projects --> 202 @@ -134,7 +128,7 @@ async def create_project(self, project: NewProjectIn): wait=wait_fixed(0.5), stop=stop_after_delay(60), reraise=True, - before_sleep=before_sleep_log(logger, logging.INFO), + before_sleep=before_sleep_log(_logger, logging.INFO), ): with attempt: data = await self.get(status_url) @@ -155,7 +149,6 @@ async def get_project(self, project_id: UUID) -> Project: return Project.parse_obj(data) async def list_projects(self, solver_name: str) -> list[Project]: - # TODO: pagination? 
resp = await self.client.get( "/projects", params={"type": "user", "show_hidden": True}, @@ -164,7 +157,6 @@ async def list_projects(self, solver_name: str) -> list[Project]: data: ListAnyDict = cast(ListAnyDict, self._postprocess(resp)) or [] - # FIXME: move filter to webserver API (next PR) projects: deque[Project] = deque() for prj in data: possible_job_name = prj.get("name", "") @@ -172,7 +164,7 @@ async def list_projects(self, solver_name: str) -> list[Project]: try: projects.append(Project.parse_obj(prj)) except ValidationError as err: - logger.warning( + _logger.warning( "Invalid prj %s [%s]: %s", prj.get("uuid"), solver_name, err ) @@ -195,28 +187,9 @@ async def get_project_metadata_ports( return data -def _get_secret_key(settings: WebServerSettings): - secret_key_bytes = settings.WEBSERVER_SESSION_SECRET_KEY.get_secret_value().encode( - "utf-8" - ) - while len(secret_key_bytes) < 32: - secret_key_bytes += secret_key_bytes - secret_key = secret_key_bytes[:32] - - if isinstance(secret_key, str): - pass - elif isinstance(secret_key, (bytes, bytearray)): - secret_key = base64.urlsafe_b64encode(secret_key) - return secret_key - - class WebserverApi(BaseServiceClientApi): """Access to web-server API""" - # def create_auth_session(self, session_cookies) -> AuthSession: - # """ Needed per request, so it can perform """ - # return AuthSession(client=self.client, vtag="v0", session_cookies=session_cookies) - # MODULES APP SETUP ------------------------------------------------------------- @@ -228,21 +201,18 @@ def setup(app: FastAPI, settings: WebServerSettings | None = None) -> None: assert settings is not None # nosec setup_client_instance( - app, WebserverApi, api_baseurl=settings.base_url, service_name="webserver" + app, WebserverApi, api_baseurl=settings.api_base_url, service_name="webserver" ) - # TODO: old startup. 
need to integrat - # TODO: init client and then build sessions from client using depenencies - def on_startup() -> None: # normalize & encrypt secret_key = settings.WEBSERVER_SESSION_SECRET_KEY.get_secret_value() app.state.webserver_fernet = fernet.Fernet(secret_key) # init client - logger.debug("Setup webserver at %s...", settings.base_url) + _logger.debug("Setup webserver at %s...", settings.api_base_url) - client = AsyncClient(base_url=settings.base_url) + client = AsyncClient(base_url=settings.api_base_url) app.state.webserver_client = client async def on_shutdown() -> None: @@ -250,7 +220,7 @@ async def on_shutdown() -> None: client: AsyncClient = app.state.webserver_client await client.aclose() del app.state.webserver_client - logger.debug("Webserver closed successfully") + _logger.debug("Webserver closed successfully") app.add_event_handler("startup", on_startup) app.add_event_handler("shutdown", on_shutdown) diff --git a/services/api-server/src/simcore_service_api_server/utils/client_base.py b/services/api-server/src/simcore_service_api_server/utils/client_base.py index 8e591c8ed39..dabb7633591 100644 --- a/services/api-server/src/simcore_service_api_server/utils/client_base.py +++ b/services/api-server/src/simcore_service_api_server/utils/client_base.py @@ -1,13 +1,17 @@ import logging +from contextlib import suppress from dataclasses import dataclass -from typing import Optional +from pathlib import Path import httpx from fastapi import FastAPI +from httpx._types import URLTypes +from pydantic import ValidationError from .app_data import AppDataMixin +from .http_calls_capture import get_captured_as_json -log = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) @dataclass @@ -30,36 +34,84 @@ async def is_responsive(self) -> bool: resp.raise_for_status() return True except (httpx.HTTPStatusError, httpx.RequestError) as err: - log.error("%s not responsive: %s", self.service_name, err) + _logger.error("%s not responsive: %s", self.service_name, err) return False ping = is_responsive # alias +class _AsyncClientForDevelopmentOnly(httpx.AsyncClient): + """ + Adds captures mechanism + """ + + async def request(self, method: str, url: URLTypes, **kwargs): + response: httpx.Response = await super().request(method, url, **kwargs) + + capture_name = f"{method} {url}" + _logger.info("Capturing %s ... 
[might be slow]", capture_name) + try: + capture_json = get_captured_as_json(name=capture_name, response=response) + _capture_logger.info("%s", capture_json) + except ValidationError: + _capture_logger.exception("Failed capturing %s", capture_name) + + return response + + # HELPERS ------------------------------------------------------------- +_capture_logger = logging.getLogger(f"{__name__}.capture") + + +def _setup_capture_logger_once(capture_path: Path) -> None: + """NOTE: this is only to capture during developmetn""" + + if not any( + isinstance(hnd, logging.FileHandler) for hnd in _capture_logger.handlers + ): + file_handler = logging.FileHandler(filename=f"{capture_path}") + file_handler.setLevel(logging.INFO) + + formatter = logging.Formatter("%(asctime)s - %(message)s") + file_handler.setFormatter(formatter) + + _capture_logger.addHandler(file_handler) + _logger.info("Setup capture logger at %s", capture_path) + _capture_logger.info("Started capture session ...") + def setup_client_instance( app: FastAPI, api_cls: type[BaseServiceClientApi], api_baseurl, service_name: str, - **extra_fields + **extra_fields, ) -> None: """Helper to add init/cleanup of ServiceClientApi instances in the app lifespam""" assert issubclass(api_cls, BaseServiceClientApi) # nosec + # Http client class + client_class: type = httpx.AsyncClient + with suppress(AttributeError): + # NOTE that this is a general function with no guarantees as when is going to be used. + # Here, 'AttributeError' might be raied when app.state.settings is still not initialized + if capture_path := app.state.settings.API_SERVER_DEV_HTTP_CALLS_LOGS_PATH: + _setup_capture_logger_once(capture_path) + client_class = _AsyncClientForDevelopmentOnly + + # events def _create_instance() -> None: api_cls.create_once( app, - client=httpx.AsyncClient(base_url=api_baseurl), + client=client_class(base_url=api_baseurl), service_name=service_name, - **extra_fields + **extra_fields, ) async def _cleanup_instance() -> None: - api_obj: Optional[BaseServiceClientApi] = api_cls.pop_instance(app) + api_obj: BaseServiceClientApi | None = api_cls.pop_instance(app) if api_obj: await api_obj.client.aclose() diff --git a/services/api-server/src/simcore_service_api_server/utils/http_calls_capture.py b/services/api-server/src/simcore_service_api_server/utils/http_calls_capture.py new file mode 100644 index 00000000000..de02bff1a46 --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/utils/http_calls_capture.py @@ -0,0 +1,54 @@ +import json +from http import HTTPStatus +from typing import Any, Literal + +import httpx +from pydantic import BaseModel, Field + + +class HttpApiCallCaptureModel(BaseModel): + """ + Captures relevant information of a call to the http api + """ + + name: str + description: str + method: Literal["GET", "PUT", "POST", "PATCH", "DELETE"] + path: str + query: str | None = None + request_payload: dict[str, Any] | None = None + response_body: dict[str, Any] | list | None = None + status_code: HTTPStatus = Field(default=HTTPStatus.OK) + + @classmethod + def create_from_response( + cls, response: httpx.Response, name: str, description: str = "" + ) -> "HttpApiCallCaptureModel": + request = response.request + + return cls( + name=name, + description=description or f"{request}", + method=request.method, + path=request.url.path, + query=request.url.query.decode() or None, + request_payload=json.loads(request.content.decode()) + if request.content + else None, + response_body=response.json(), + status_code=response.status_code, + ) 
+ + def __str__(self) -> str: + return f"{self.description: self.request_desc}" + + @property + def request_desc(self) -> str: + return f"{self.method} {self.path}" + + +def get_captured_as_json(name: str, response: httpx.Response) -> str: + capture_json: str = HttpApiCallCaptureModel.create_from_response( + response, name=name + ).json(indent=1) + return f"{capture_json}" diff --git a/services/api-server/src/simcore_service_api_server/utils/solver_job_models_converters.py b/services/api-server/src/simcore_service_api_server/utils/solver_job_models_converters.py index 0683d954a88..c4e0c7befd6 100644 --- a/services/api-server/src/simcore_service_api_server/utils/solver_job_models_converters.py +++ b/services/api-server/src/simcore_service_api_server/utils/solver_job_models_converters.py @@ -22,7 +22,7 @@ from ..models.schemas.files import File from ..models.schemas.jobs import ArgumentType, Job, JobInputs, JobStatus, TaskStates from ..models.schemas.solvers import Solver, SolverKeyId -from ..modules.director_v2 import ComputationTaskGet +from ..plugins.director_v2 import ComputationTaskGet from .typing_extra import get_types # UTILS ------ diff --git a/services/api-server/tests/conftest.py b/services/api-server/tests/conftest.py index 27f03a23e17..3b9a2829ab0 100644 --- a/services/api-server/tests/conftest.py +++ b/services/api-server/tests/conftest.py @@ -8,11 +8,12 @@ import pytest import simcore_service_api_server from dotenv import dotenv_values -from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.utils_envs import EnvVarsDict CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent pytest_plugins = [ + "pytest_simcore.cli_runner", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", "pytest_simcore.repository_paths", @@ -52,20 +53,11 @@ def default_app_env_vars( env_vars.update(project_env_devel_vars) env_vars.update(dockerfile_env_vars) env_vars["API_SERVER_DEV_FEATURES_ENABLED"] = "1" + env_vars["API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED"] = "1" return env_vars -@pytest.fixture -def patched_default_app_environ( - monkeypatch: pytest.MonkeyPatch, default_app_env_vars: EnvVarsDict -) -> EnvVarsDict: - """default environment for testing""" - - setenvs_from_dict(monkeypatch, default_app_env_vars) - return default_app_env_vars - - ## FOLDER LAYOUT ---- diff --git a/services/api-server/tests/integration/.gitkeep b/services/api-server/tests/integration/.gitkeep deleted file mode 100644 index 97834dcf47d..00000000000 --- a/services/api-server/tests/integration/.gitkeep +++ /dev/null @@ -1 +0,0 @@ -# Use tests/public-api diff --git a/services/api-server/tests/unit/_with_db/conftest.py b/services/api-server/tests/unit/_with_db/conftest.py index 307fccde7c3..c9bfbaa386b 100644 --- a/services/api-server/tests/unit/_with_db/conftest.py +++ b/services/api-server/tests/unit/_with_db/conftest.py @@ -9,7 +9,7 @@ import subprocess from pathlib import Path from pprint import pformat -from typing import Callable, Dict, Union +from typing import Callable import aiopg.sa import aiopg.sa.engine as aiopg_sa_engine @@ -23,10 +23,13 @@ from aiopg.sa.result import RowProxy from faker import Faker from fastapi import FastAPI +from pytest import MonkeyPatch from pytest_simcore.helpers.rawdata_fakers import random_user from pytest_simcore.helpers.typing_env import EnvVarsDict +from pytest_simcore.helpers.utils_envs import setenvs_from_dict from simcore_postgres_database.models.base import metadata from 
simcore_service_api_server.core.application import init_app +from simcore_service_api_server.core.settings import PostgresSettings from simcore_service_api_server.db.repositories import BaseRepository from simcore_service_api_server.db.repositories.users import UsersRepository from simcore_service_api_server.models.domain.api_keys import ApiKeyInDB @@ -64,7 +67,7 @@ def docker_compose_file( @pytest.fixture(scope="session") -def postgres_service(docker_services, docker_ip, docker_compose_file: Path) -> Dict: +def postgres_service(docker_services, docker_ip, docker_compose_file: Path) -> dict: # check docker-compose's environ is resolved properly config = yaml.safe_load(docker_compose_file.read_text()) @@ -108,7 +111,7 @@ def is_postgres_responsive() -> bool: def make_engine(postgres_service: dict) -> Callable: dsn = postgres_service["dsn"] # session scope freezes dsn - def maker(*, is_async=True) -> Union[aiopg_sa_engine.Engine, sa_engine.Engine]: + def maker(*, is_async=True) -> aiopg_sa_engine.Engine | sa_engine.Engine: if is_async: return aiopg.sa.create_engine(dsn) return sa.create_engine(dsn) @@ -137,7 +140,22 @@ def migrated_db(postgres_service: dict, make_engine: Callable): @pytest.fixture -def app(patched_default_app_environ: EnvVarsDict, migrated_db: None) -> FastAPI: +def app_environment( + monkeypatch: MonkeyPatch, default_app_env_vars: EnvVarsDict +) -> EnvVarsDict: + """app environments WITH database settings""" + + envs = setenvs_from_dict(monkeypatch, default_app_env_vars) + assert "API_SERVER_POSTGRES" not in envs + + # Should be sufficient to create settings + print(PostgresSettings.create_from_envs().json(indent=1)) + + return envs + + +@pytest.fixture +def app(app_environment: EnvVarsDict, migrated_db: None) -> FastAPI: """Overrides app to ensure that: - it uses default environ as pg - db is started and initialized diff --git a/services/api-server/tests/unit/_with_db/test_api_user.py b/services/api-server/tests/unit/_with_db/test_api_user.py index 74ff9aaffdf..87d2de26c64 100644 --- a/services/api-server/tests/unit/_with_db/test_api_user.py +++ b/services/api-server/tests/unit/_with_db/test_api_user.py @@ -26,7 +26,7 @@ def mocked_webserver_service_api(app: FastAPI): # pylint: disable=not-context-manager with respx.mock( - base_url=settings.API_SERVER_WEBSERVER.base_url, + base_url=settings.API_SERVER_WEBSERVER.api_base_url, assert_all_called=False, assert_all_mocked=True, ) as respx_mock: diff --git a/services/api-server/tests/unit/_with_db/test_core_settings__with_db.py b/services/api-server/tests/unit/_with_db/test_core_settings__with_db.py new file mode 100644 index 00000000000..4cb770979f0 --- /dev/null +++ b/services/api-server/tests/unit/_with_db/test_core_settings__with_db.py @@ -0,0 +1,22 @@ +# pylint: disable=unused-variable +# pylint: disable=unused-argument +# pylint: disable=redefined-outer-name + + +import logging + +from pytest_simcore.helpers.utils_envs import EnvVarsDict +from simcore_service_api_server.core.settings import ApplicationSettings, BootModeEnum +from yarl import URL + + +def test_unit_with_db_app_environment(app_environment: EnvVarsDict): + settings = ApplicationSettings.create_from_envs() + print("captured settings: \n", settings.json(indent=2)) + + assert settings.SC_BOOT_MODE == BootModeEnum.PRODUCTION + assert settings.log_level == logging.DEBUG + + assert URL(settings.API_SERVER_POSTGRES.dsn) == URL( + "postgresql://test:test@127.0.0.1:5432/test" + ) diff --git a/services/api-server/tests/unit/api_solvers/conftest.py 
b/services/api-server/tests/unit/api_solvers/conftest.py index 13e690e6cf3..6ed52f818cc 100644 --- a/services/api-server/tests/unit/api_solvers/conftest.py +++ b/services/api-server/tests/unit/api_solvers/conftest.py @@ -8,12 +8,15 @@ from pathlib import Path from typing import Any, Iterator +import httpx import pytest import respx import yaml +from faker import Faker from fastapi import FastAPI, status +from models_library.projects import Project +from models_library.utils.fastapi_encoders import jsonable_encoder from pytest_simcore.helpers import faker_catalog -from pytest_simcore.simcore_webserver_projects_rest_api import GET_PROJECT from respx import MockRouter from simcore_service_api_server.core.settings import ApplicationSettings @@ -48,7 +51,7 @@ def webserver_service_openapi_specs( @pytest.fixture def mocked_webserver_service_api( - app: FastAPI, webserver_service_openapi_specs: dict[str, Any] + app: FastAPI, webserver_service_openapi_specs: dict[str, Any], faker: Faker ) -> Iterator[MockRouter]: settings: ApplicationSettings = app.state.settings assert settings.API_SERVER_WEBSERVER @@ -57,22 +60,21 @@ def mocked_webserver_service_api( # pylint: disable=not-context-manager with respx.mock( - base_url=settings.API_SERVER_WEBSERVER.base_url, + base_url=settings.API_SERVER_WEBSERVER.api_base_url, assert_all_called=False, assert_all_mocked=True, ) as respx_mock: # include /v0 - assert settings.API_SERVER_WEBSERVER.base_url.endswith("/v0") + assert settings.API_SERVER_WEBSERVER.api_base_url.endswith("/v0") # healthcheck_readiness_probe, healthcheck_liveness_probe - response_body = ( - { - "data": openapi["paths"]["/"]["get"]["responses"]["200"]["content"][ - "application/json" - ]["schema"]["properties"]["data"]["example"] - }, - ) + response_body = { + "data": openapi["paths"]["/"]["get"]["responses"]["200"]["content"][ + "application/json" + ]["schema"]["properties"]["data"]["example"] + } + respx_mock.get(path="/v0/", name="healthcheck_readiness_probe").respond( status.HTTP_200_OK, json=response_body ) @@ -80,7 +82,55 @@ def mocked_webserver_service_api( status.HTTP_200_OK, json=response_body ) - # get_task_status + class _SideEffects: + def __init__(self): + # cached + self._projects: dict[str, Project] = {} + + @staticmethod + def get_body_as_json(request): + return json.load(request.stream) + + def create_project(self, request: httpx.Request): + task_id = faker.uuid4() + + project_create = self.get_body_as_json(request) + self._projects[task_id] = Project(**project_create) + + return httpx.Response( + status.HTTP_202_ACCEPTED, + json={ + "data": { + "task_id": task_id, + "status_href": f"{settings.API_SERVER_WEBSERVER.api_base_url}/tasks/{task_id}", + "result_href": f"{settings.API_SERVER_WEBSERVER.api_base_url}/tasks/{task_id}/result", + } + }, + ) + + def get_result(self, request: httpx.Request, *, task_id: str): + # TODO: replace with ProjectGet + project_get = jsonable_encoder( + self._projects[task_id].dict(by_alias=True) + ) + return httpx.Response( + status.HTTP_200_OK, + json={"data": project_get}, + ) + + fake_workflow = _SideEffects() + + # http://webserver:8080/v0/projects?hidden=true + respx_mock.post( + path__regex="/projects$", + name="create_projects", + ).mock(side_effect=fake_workflow.create_project) + + respx_mock.get( + path__regex=r"/tasks/(?P[\w-]+)/result$", + name="get_task_result", + ).mock(side_effect=fake_workflow.get_result) + respx_mock.get( path__regex=r"/tasks/(?P[\w/%]+)", name="get_task_status", @@ -88,35 +138,13 @@ def 
mocked_webserver_service_api( status.HTTP_200_OK, json={ "data": { - "task_progress": 1, + "task_progress": {"message": "fake job done", "percent": 1}, "done": True, "started": "2018-07-01T11:13:43Z", } }, ) - # get_task_result - respx_mock.get( - path__regex=r"/tasks/(?P<task_id>[\w/%]+)/result", - name="get_task_result", - ).respond( - status.HTTP_200_OK, - json=GET_PROJECT.response_body, - ) - - # create_projects - task_id = "abc" - # http://webserver:8080/v0/projects?hidden=true - respx_mock.post(path__regex="/projects$", name="create_projects").respond( - status.HTTP_202_ACCEPTED, - json={ - "data": { - "task_id": "123", - "status_hef": f"{settings.API_SERVER_WEBSERVER.base_url}/task/{task_id}", - "result_href": f"{settings.API_SERVER_WEBSERVER.base_url}/task/{task_id}/result", - } - }, - ) yield respx_mock diff --git a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers.py b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers.py index d35c2c30d34..2af39ccd4f2 100644 --- a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers.py +++ b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers.py @@ -1,4 +1,6 @@ +# pylint: disable=protected-access # pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments # pylint: disable=unused-argument # pylint: disable=unused-variable @@ -19,7 +21,7 @@ async def test_list_solvers( mocker: MockFixture, ): warn = mocker.patch.object( - simcore_service_api_server.api.routes.solvers.logger, "warning" + simcore_service_api_server.api.routes.solvers._logger, "warning" ) # list solvers latest releases diff --git a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py index 1e9e7cedee2..a83b17a0af7 100644 --- a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py +++ b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs.py @@ -98,9 +98,9 @@ def mocked_directorv2_service_api( # pylint: disable=not-context-manager with respx.mock( - base_url=settings.API_SERVER_DIRECTOR_V2.base_url, + base_url=settings.API_SERVER_DIRECTOR_V2.api_base_url, assert_all_called=False, - assert_all_mocked=False, + assert_all_mocked=True, # IMPORTANT: KEEP always True!
) as respx_mock: # check that what we emulate, actually still exists @@ -121,7 +121,7 @@ def mocked_directorv2_service_api( respx_mock.get( path__regex=r"/computations/(?P<project_id>[\w-]+)/tasks/-/logfile", - name="get_computation_logs", + name="get_computation_logs", # = operation_id ).respond( status.HTTP_200_OK, json=[ @@ -218,7 +218,6 @@ def solver_version() -> str: @pytest.mark.acceptance_test( "New feature https://github.com/ITISFoundation/osparc-simcore/issues/3940" ) -@pytest.mark.xfail # TODO: will fix in next PR async def test_run_solver_job( client: httpx.AsyncClient, directorv2_service_openapi_specs: dict[str, Any], @@ -267,10 +266,12 @@ async def test_run_solver_job( "result": "string", "pipeline_details": { "adjacency_list": { - "additionalProp1": ["3fa85f64-5717-4562-b3fc-2c963f66afa6"], + "3fa85f64-5717-4562-b3fc-2c963f66afa6": [ + "3fa85f64-5717-4562-b3fc-2c963f66afa6" + ], }, "node_states": { - "additionalProp1": { + "3fa85f64-5717-4562-b3fc-2c963f66afa6": { "modified": True, "dependencies": ["3fa85f64-5717-4562-b3fc-2c963f66afa6"], "currentStatus": "NOT_STARTED", @@ -279,8 +280,8 @@ }, "iteration": 1, "cluster_id": 0, - "url": "string", - "stop_url": "string", + "url": "http://test.com", + "stop_url": "http://test.com", }, ) @@ -341,9 +342,6 @@ async def test_run_solver_job( json=JobInputs(values={"x": 3.14, "n": 42}).dict(), ) assert resp.status_code == status.HTTP_200_OK - assert mocked_directorv2_service_api[ - "create_computation_v2_computations_post" - ].called assert mocked_webserver_service_api["create_projects"].called assert mocked_webserver_service_api["get_task_status"].called @@ -353,9 +351,9 @@ async def test_run_solver_job( # Start Job resp = await client.post( - f"/v0/solvers/{solver_key}/releases/{solver_version}/jobs/{job.id}", + f"/v0/solvers/{solver_key}/releases/{solver_version}/jobs/{job.id}:start", auth=auth, - params={"cluster_id", 1}, + params={"cluster_id": 1}, ) assert resp.status_code == status.HTTP_200_OK assert mocked_directorv2_service_api[ diff --git a/services/api-server/tests/unit/conftest.py b/services/api-server/tests/unit/conftest.py index 4d87e023333..4d9a92d7a0a 100644 --- a/services/api-server/tests/unit/conftest.py +++ b/services/api-server/tests/unit/conftest.py @@ -3,7 +3,6 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable -from pprint import pprint from typing import AsyncIterator, Iterator import aiohttp.test_utils @@ -16,6 +15,7 @@ from httpx._transports.asgi import ASGITransport from moto.server import ThreadedMotoServer from pydantic import HttpUrl, parse_obj_as +from pytest import MonkeyPatch from pytest_simcore.helpers.utils_docker import get_localhost_ip from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict from requests.auth import HTTPBasicAuth @@ -26,44 +26,32 @@ @pytest.fixture -def patched_light_app_environ( - patched_default_app_environ: EnvVarsDict, monkeypatch: pytest.MonkeyPatch +def app_environment( + monkeypatch: MonkeyPatch, default_app_env_vars: EnvVarsDict ) -> EnvVarsDict: """Config that disables many plugins e.g.
database or tracing""" - env_vars = {} - env_vars.update(patched_default_app_environ) - - pprint(list(ApplicationSettings.schema()["properties"].keys())) - # [ - # 'SC_BOOT_MODE', - # 'LOG_LEVEL', - # 'API_SERVER_POSTGRES', - # 'API_SERVER_WEBSERVER', - # 'API_SERVER_CATALOG', - # 'API_SERVER_STORAGE', - # 'API_SERVER_DIRECTOR_V2', - # 'API_SERVER_TRACING', - # 'API_SERVER_DEV_FEATURES_ENABLED', - # 'API_SERVER_REMOTE_DEBUG_PORT' - # ] - # - env_vars.update( + env_vars = setenvs_from_dict( + monkeypatch, { + **default_app_env_vars, "WEBSERVER_HOST": "webserver", "WEBSERVER_SESSION_SECRET_KEY": Fernet.generate_key().decode("utf-8"), "API_SERVER_POSTGRES": "null", "API_SERVER_TRACING": "null", "LOG_LEVEL": "debug", "SC_BOOT_MODE": "production", - } + }, ) - setenvs_from_dict(monkeypatch, env_vars) + + # should be sufficient to create settings + print(ApplicationSettings.create_from_envs().json(indent=1)) + return env_vars @pytest.fixture -def app(patched_light_app_environ: EnvVarsDict) -> FastAPI: +def app(app_environment: EnvVarsDict) -> FastAPI: """Inits app on a light environment""" the_app = init_app() return the_app diff --git a/services/api-server/tests/unit/test_api_routes_studies.py b/services/api-server/tests/unit/test_api_routes_studies.py index 54495fa5ae0..06f0a2cfd97 100644 --- a/services/api-server/tests/unit/test_api_routes_studies.py +++ b/services/api-server/tests/unit/test_api_routes_studies.py @@ -65,7 +65,7 @@ def mocked_webserver_service_api( # ENTRYPOINTS --------- # pylint: disable=not-context-manager with respx.mock( - base_url=settings.API_SERVER_WEBSERVER.base_url, + base_url=settings.API_SERVER_WEBSERVER.api_base_url, assert_all_called=False, assert_all_mocked=True, ) as respx_mock: @@ -114,7 +114,7 @@ def test_mocked_webserver_service_api( # settings: ApplicationSettings = app.state.settings assert settings.API_SERVER_WEBSERVER - webserver_api_baseurl = settings.API_SERVER_WEBSERVER.base_url + webserver_api_baseurl = settings.API_SERVER_WEBSERVER.api_base_url resp = httpx.get(f"{webserver_api_baseurl}/health") assert resp.status_code == status.HTTP_200_OK diff --git a/services/api-server/tests/unit/test_cli.py b/services/api-server/tests/unit/test_cli.py new file mode 100644 index 00000000000..5ae5ca4c10f --- /dev/null +++ b/services/api-server/tests/unit/test_cli.py @@ -0,0 +1,40 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + + +import os + +from fastapi import FastAPI +from pytest_simcore.helpers.typing_env import EnvVarsDict +from simcore_service_api_server.cli import main +from simcore_service_api_server.core.settings import ApplicationSettings +from typer.testing import CliRunner + + +def test_cli_help(cli_runner: CliRunner): + result = cli_runner.invoke(main, "--help") + assert result.exit_code == os.EX_OK, result.output + + +def test_cli_run(cli_runner: CliRunner): + result = cli_runner.invoke(main, "run") + assert "disabled" in result.output + assert result.exit_code == os.EX_OK, result.output + + +def test_cli_list_settings(cli_runner: CliRunner, app_environment: EnvVarsDict): + result = cli_runner.invoke(main, ["settings", "--show-secrets", "--as-json"]) + assert result.exit_code == os.EX_OK, result.output + + print(result.output) + settings = ApplicationSettings.parse_raw(result.output) + assert settings == ApplicationSettings.create_from_envs() + + +def test_main(app_environment: EnvVarsDict): + from simcore_service_api_server.main import the_app + 
+ assert the_app + assert isinstance(the_app, FastAPI) diff --git a/services/api-server/tests/unit/test_core_settings.py b/services/api-server/tests/unit/test_core_settings.py index 94a2c89cf16..625de4c0f91 100644 --- a/services/api-server/tests/unit/test_core_settings.py +++ b/services/api-server/tests/unit/test_core_settings.py @@ -7,24 +7,13 @@ from pytest_simcore.helpers.utils_envs import EnvVarsDict from simcore_service_api_server.core.settings import ApplicationSettings, BootModeEnum -from yarl import URL -def test_default_app_environ(patched_default_app_environ: EnvVarsDict): - # loads from environ +def test_unit_app_environment(app_environment: EnvVarsDict): settings = ApplicationSettings.create_from_envs() print("captured settings: \n", settings.json(indent=2)) assert settings.SC_BOOT_MODE == BootModeEnum.PRODUCTION assert settings.log_level == logging.DEBUG - assert URL(settings.API_SERVER_POSTGRES.dsn) == URL( - "postgresql://test:test@127.0.0.1:5432/test" - ) - - -def test_light_app_environ(patched_light_app_environ: EnvVarsDict): - settings = ApplicationSettings.create_from_envs() - print("captured settings: \n", settings.json(indent=2)) - assert settings.API_SERVER_POSTGRES is None diff --git a/services/api-server/tests/unit/test_models_schemas_files.py b/services/api-server/tests/unit/test_models_schemas_files.py index e8cbdf37201..5d1c5554cf0 100644 --- a/services/api-server/tests/unit/test_models_schemas_files.py +++ b/services/api-server/tests/unit/test_models_schemas_files.py @@ -13,7 +13,7 @@ from models_library.api_schemas_storage import FileMetaDataGet as StorageFileMetaData from pydantic import ValidationError from simcore_service_api_server.models.schemas.files import File -from simcore_service_api_server.modules.storage import to_file_api_model +from simcore_service_api_server.plugins.storage import to_file_api_model FILE_CONTENT = "This is a test" diff --git a/services/api-server/tests/unit/test_utils_http_calls_capture.py b/services/api-server/tests/unit/test_utils_http_calls_capture.py new file mode 100644 index 00000000000..25eec3b087a --- /dev/null +++ b/services/api-server/tests/unit/test_utils_http_calls_capture.py @@ -0,0 +1,174 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + + +import asyncio +import logging +import re +from contextlib import suppress +from typing import Iterable + +import docker +import httpx +import pytest +import respx +from docker.errors import APIError +from faker import Faker +from models_library.basic_regex import UUID_RE_BASE +from simcore_service_api_server.utils.http_calls_capture import HttpApiCallCaptureModel +from tenacity import retry +from tenacity.after import after_log +from tenacity.retry import retry_if_exception_type +from tenacity.stop import stop_after_delay +from tenacity.wait import wait_fixed + + +@pytest.fixture(scope="module") +def httpbin_base_url() -> Iterable[str]: + # yield "https://httpbin.org/" # sometimes is not available + + port = 80 + base_url = f"http://127.0.0.1:{port}" + + client = docker.from_env() + container_name = "httpbin-fixture" + try: + client.containers.run( + "kennethreitz/httpbin", + ports={port: 80}, + name=container_name, + detach=True, + ) + + @retry( + wait=wait_fixed(1), + retry=retry_if_exception_type(httpx.HTTPError), + stop=stop_after_delay(10), + after=after_log(logging.getLogger(), logging.DEBUG), + ) + def _wait_until_httpbin_is_responsive(): + r = 
httpx.get(f"{base_url}/get") + r.raise_for_status() + + _wait_until_httpbin_is_responsive() + + yield base_url + + finally: + with suppress(APIError): + container = client.containers.get(container_name) + container.remove(force=True) + + +async def test_capture_http_call( + event_loop: asyncio.AbstractEventLoop, httpbin_base_url +): + # CAPTURE + async with httpx.AsyncClient() as client: + + response: httpx.Response = await client.get(f"{httpbin_base_url}/json") + print(response) + + request: httpx.Request = response.request + assert response.request + + captured = HttpApiCallCaptureModel.create_from_response( + response, name="get_json" + ) + + print(captured.json(indent=1)) + + # MOCK + with respx.mock( + base_url="http://test.it", + assert_all_called=False, + assert_all_mocked=True, # IMPORTANT: KEEP always True! + ) as respx_mock: + + respx_mock.request( + method=captured.method, + path=captured.path, + name=captured.name, + ).respond( + status_code=captured.status_code, + json=captured.response_body, + ) + + response: httpx.Response = await client.get("http://test.it/json") + + assert respx_mock[captured.name].called + assert response.json() == captured.response_body + assert response.status_code == captured.status_code + + +async def test_capture_http_dynamic_call( + event_loop: asyncio.AbstractEventLoop, faker: Faker, httpbin_base_url: str +): + + # CAPTURE + async with httpx.AsyncClient() as client: + + sample_uid = faker.uuid4() # used during test sampling + + response: httpx.Response = await client.post( + f"{httpbin_base_url}/anything/{sample_uid}", + params={"n": 42}, + json={ + "resource_id": sample_uid, + "static": "constant", + }, + ) + print(response) + + request: httpx.Request = response.request + assert response.request + + captured = HttpApiCallCaptureModel.create_from_response( + response, name="get_anything" + ) + + assert captured.query == "n=42" + + # pattern with named-group + pattern = rf"(?P<resource_uid>{UUID_RE_BASE})" + match = re.search(pattern, captured.path) + assert match + assert match.groupdict() == {"resource_uid": sample_uid} + + # subs_json = re.sub(f"{resource_uid}", pattern, captured.json()) + # new_capture = HttpApiCallCaptureModel.parse_raw(subs_json) + + # MOCK + with respx.mock( + base_url="http://test.it", + assert_all_called=True, + assert_all_mocked=True, # IMPORTANT: KEEP always True!
+ ) as respx_mock: + + respx_mock.request( + method=captured.method, + path__regex=re.sub( + f"{sample_uid}", pattern, captured.path + ), # using REGEX + name=captured.name, + ).respond( + status_code=captured.status_code, + json=captured.response_body, + ) + + other_uid = faker.uuid4() + + response: httpx.Response = await client.post( + f"http://test.it/anything/{other_uid}", + params={"n": 42}, + json={ + "resource_id": other_uid, + "static": "constant", + }, + ) + + assert respx_mock[captured.name].called + assert response.json() == captured.response_body + assert response.status_code == captured.status_code diff --git a/services/api-server/tests/unit/test_utils_solver_job_models_converters.py b/services/api-server/tests/unit/test_utils_solver_job_models_converters.py index 3f2c4194413..83b6c8037ee 100644 --- a/services/api-server/tests/unit/test_utils_solver_job_models_converters.py +++ b/services/api-server/tests/unit/test_utils_solver_job_models_converters.py @@ -212,7 +212,7 @@ def test_create_job_from_project(): @pytest.mark.skip(reason="TODO: next PR") def test_create_jobstatus_from_task(): from simcore_service_api_server.models.schemas.jobs import JobStatus - from simcore_service_api_server.modules.director_v2 import ComputationTaskGet + from simcore_service_api_server.plugins.director_v2 import ComputationTaskGet task = ComputationTaskGet.parse_obj({}) # TODO: job_status: JobStatus = create_jobstatus_from_task(task)
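
Note on the mocked long-running workflow above: the new _SideEffects class in tests/unit/api_solvers/conftest.py emulates the webserver's create-project task flow (create_projects -> get_task_status -> get_task_result). The key respx detail it relies on is that named groups in a path__regex pattern (e.g. task_id) are forwarded as keyword arguments to the side-effect callable. A minimal, self-contained sketch of that pattern follows; the service name and the /jobs endpoints are illustrative only and not part of this PR.

import httpx
import pytest
import respx
from fastapi import status


@pytest.mark.asyncio
async def test_fake_long_running_workflow():
    created: dict[str, dict] = {}  # task_id -> submitted payload

    def _submit(request: httpx.Request) -> httpx.Response:
        # capture the submitted body and hand back a task handle
        task_id = "task-123"
        created[task_id] = {"body": request.content.decode()}
        return httpx.Response(
            status.HTTP_202_ACCEPTED, json={"data": {"task_id": task_id}}
        )

    def _get_result(request: httpx.Request, *, task_id: str) -> httpx.Response:
        # the named group "task_id" in path__regex arrives here as a kwarg
        return httpx.Response(status.HTTP_200_OK, json={"data": created[task_id]})

    with respx.mock(base_url="http://my_service", assert_all_mocked=True) as respx_mock:
        respx_mock.post(path__regex=r"/jobs$", name="submit").mock(side_effect=_submit)
        respx_mock.get(
            path__regex=r"/tasks/(?P<task_id>[\w-]+)/result$", name="get_result"
        ).mock(side_effect=_get_result)

        async with httpx.AsyncClient(base_url="http://my_service") as client:
            task_id = (await client.post("/jobs", json={"x": 1})).json()["data"]["task_id"]
            result = await client.get(f"/tasks/{task_id}/result")

    assert respx_mock["get_result"].called
    assert result.status_code == status.HTTP_200_OK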
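The new tests/unit/test_utils_http_calls_capture.py exercises HttpApiCallCaptureModel: a real call against a local httpbin is captured and then replayed through respx with assert_all_mocked=True. Below is a rough sketch of that capture/replay round trip using a simplified stand-in model (_CaptureSketch); the real model in simcore_service_api_server.utils.http_calls_capture defines its own fields and helpers, so treat this only as an illustration of the idea.

import httpx
import respx
from pydantic import BaseModel


class _CaptureSketch(BaseModel):
    name: str
    method: str
    path: str
    query: str | None = None
    status_code: int
    response_body: dict | list | None = None

    @classmethod
    def create_from_response(cls, response: httpx.Response, name: str) -> "_CaptureSketch":
        request = response.request
        return cls(
            name=name,
            method=request.method,
            path=request.url.path,
            query=request.url.query.decode() or None,
            status_code=response.status_code,
            response_body=response.json(),
        )


def test_replay_capture():
    with respx.mock(base_url="https://example.org", assert_all_mocked=True) as respx_mock:
        # pretend this capture was loaded from a previous recording
        captured = _CaptureSketch(
            name="get_json", method="GET", path="/json", status_code=200, response_body={"ok": True}
        )
        respx_mock.request(
            method=captured.method, path=captured.path, name=captured.name
        ).respond(status_code=captured.status_code, json=captured.response_body)

        response = httpx.get("https://example.org/json")

    assert respx_mock[captured.name].called
    assert response.json() == captured.response_body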
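Both conftest.py files now expose an app_environment fixture that builds the test environment with setenvs_from_dict and validates it via *Settings.create_from_envs(). A test module can specialize it by overriding the fixture and requesting the parent one; the variable added below (API_SERVER_DEV_FEATURES_ENABLED, one of the settings listed in the removed pprint comment) is just an example of such an override, not a change made in this PR.

import pytest
from pytest import MonkeyPatch
from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_from_dict


@pytest.fixture
def app_environment(
    monkeypatch: MonkeyPatch, app_environment: EnvVarsDict
) -> EnvVarsDict:
    # extend the inherited environment with module-specific overrides
    return setenvs_from_dict(
        monkeypatch,
        {**app_environment, "API_SERVER_DEV_FEATURES_ENABLED": "1"},
    )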