diff --git a/packages/models-library/src/models_library/function_service/demo_units.py b/packages/models-library/src/models_library/function_service/demo_units.py
index 30e6996fc00..6337668cdcf 100644
--- a/packages/models-library/src/models_library/function_service/demo_units.py
+++ b/packages/models-library/src/models_library/function_service/demo_units.py
@@ -5,7 +5,7 @@
def build_input(schema):
description = schema.pop("description", schema["title"])
-
+
return {
"label": schema["title"],
"description": description,
@@ -15,11 +15,14 @@ def build_input(schema):
# SEE https://github.com/hgrecco/pint/blob/master/pint/default_en.txt
+
META = ServiceDockerData.parse_obj(
{
"integration-version": LATEST_INTEGRATION_VERSION,
"key": f"{FUNCTION_SERVICE_KEY_PREFIX}/data-iterator/demo-units",
- "version": "0.1.0",
+ "version": "0.2.0",
+ # CHANGELOG
+ # - 0.2.0: reversed order of the first 5 outputs
"type": ServiceType.BACKEND,
"name": "Demo Units",
"description": "Demo that takes base units as inputs and transform them in the outputs",
@@ -100,19 +103,18 @@ def build_input(schema):
),
},
"outputs": {
- "length": build_input(
+ "mass": build_input(
{
- "title": "Distance",
- "description": "Distance value converted",
- "x_unit": "milli-meter",
+ "title": "Mass",
+ "minimum": 0,
+ "x_unit": "kilo-gram",
"type": "number",
}
),
- "time": build_input(
+ "luminosity": build_input(
{
- "title": "Time",
- "minimum": 0,
- "x_unit": "minute",
+ "title": "Luminosity",
+ "x_unit": "candela",
"type": "number",
}
),
@@ -123,18 +125,19 @@ def build_input(schema):
"type": "number",
}
),
- "luminosity": build_input(
+ "time": build_input(
{
- "title": "Luminosity",
- "x_unit": "candela",
+ "title": "Time",
+ "minimum": 0,
+ "x_unit": "minute",
"type": "number",
}
),
- "mass": build_input(
+ "length": build_input(
{
- "title": "Mass",
- "minimum": 0,
- "x_unit": "kilo-gram",
+ "title": "Distance",
+ "description": "Distance value converted",
+ "x_unit": "milli-meter",
"type": "number",
}
),
diff --git a/packages/models-library/src/models_library/projects_nodes_io.py b/packages/models-library/src/models_library/projects_nodes_io.py
index 3b490d99710..6b4de2cec03 100644
--- a/packages/models-library/src/models_library/projects_nodes_io.py
+++ b/packages/models-library/src/models_library/projects_nodes_io.py
@@ -1,5 +1,9 @@
"""
- Models used as I/O of Nodes
+ Link models used at node i/o ports:
+ - Link to files:
+ - Generic: DownloadLink
+ - At Custom Service: SimCoreFileLink, DatCoreFileLink
+ - Link to another port: PortLink
"""
from pathlib import Path
@@ -26,30 +30,46 @@ class PortLink(BaseModel):
node_uuid: NodeID = Field(
...,
description="The node to get the port output from",
- example=["da5068e0-8a8d-4fb9-9516-56e5ddaef15b"],
alias="nodeUuid",
)
output: str = Field(
...,
description="The port key in the node given by nodeUuid",
regex=PROPERTY_KEY_RE,
- example=["out_2"],
)
class Config:
extra = Extra.forbid
+ schema_extra = {
+ "examples": [
+ # minimal
+ {
+ "nodeUuid": "da5068e0-8a8d-4fb9-9516-56e5ddaef15b",
+ "output": "out_2",
+ }
+ ],
+ }
class DownloadLink(BaseModel):
"""I/O port type to hold a generic download link to a file (e.g. S3 pre-signed link, etc)"""
download_link: AnyUrl = Field(..., alias="downloadLink")
- label: Optional[str]
+ label: Optional[str] = None
class Config:
extra = Extra.forbid
+ schema_extra = {
+ "examples": [
+ # minimal
+ {
+ "downloadLink": "https://fakeimg.pl/250x100/",
+ }
+ ],
+ }
+## CUSTOM STORAGE SERVICES -----------
class BaseFileLink(BaseModel):
"""Base class for I/O port types with links to storage services"""
@@ -59,23 +79,17 @@ class BaseFileLink(BaseModel):
store: Union[str, int] = Field(
...,
description="The store identifier: '0' or 0 for simcore S3, '1' or 1 for datcore",
- examples=["0", 1],
)
path: str = Field(
...,
regex=r"^.+$",
description="The path to the file in the storage provider domain",
- examples=[
- "N:package:b05739ef-260c-4038-b47d-0240d04b0599",
- "94453a6a-c8d4-52b3-a22d-ccbf81f8d636/d4442ca4-23fd-5b6b-ba6d-0b75f711c109/y_1D.txt",
- ],
)
label: Optional[str] = Field(
None,
description="The real file name",
- examples=["MyFile.txt"],
)
e_tag: Optional[str] = Field(
@@ -112,9 +126,10 @@ def pre_fill_label_with_filename_ext(cls, v, values):
class Config:
extra = Extra.forbid
schema_extra = {
+ # a project file
"example": {
"store": "0",
- "path": "api/0a3b2c56-dbcd-4871-b93b-d454b7883f9f/input.txt",
+ "path": "94453a6a-c8d4-52b3-a22d-ccbf81f8d636/0a3b2c56-dbcd-4871-b93b-d454b7883f9f/input.txt",
"eTag": "859fda0cb82fc4acb4686510a172d9a9-1",
"label": "input.txt",
},
@@ -123,7 +138,12 @@ class Config:
{
"store": "0",
"path": "api/0a3b2c56-dbcd-4871-b93b-d454b7883f9f/input.txt",
- }
+ },
+ # w/ store id as int
+ {
+ "store": 0,
+ "path": "94453a6a-c8d4-52b3-a22d-ccbf81f8d636/d4442ca4-23fd-5b6b-ba6d-0b75f711c109/y_1D.txt",
+ },
],
}
@@ -134,13 +154,11 @@ class DatCoreFileLink(BaseFileLink):
label: str = Field(
...,
description="The real file name",
- examples=["MyFile.txt"],
)
dataset: str = Field(
...,
description="Unique identifier to access the dataset on datcore (REQUIRED for datcore)",
- example=["N:dataset:f9f5ac51-33ea-4861-8e08-5b4faf655041"],
)
@validator("store", always=True)
@@ -162,4 +180,17 @@ class Config:
"path": "N:package:32df09ba-e8d6-46da-bd54-f696157de6ce",
"label": "initial_WTstates",
},
+ "examples": [
+ # with store id as str
+ {
+ "store": "1",
+ "dataset": "N:dataset:ea2325d8-46d7-4fbd-a644-30f6433070b4",
+ "path": "N:package:32df09ba-e8d6-46da-bd54-f696157de6ce",
+ "label": "initial_WTstates",
+ },
+ ],
}
+
+
+# Bundles all models that link to a file (as opposed to PortLink)
+LinkToFileTypes = Union[SimCoreFileLink, DatCoreFileLink, DownloadLink]
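+# A minimal usage sketch (assumes pydantic v1's parse_obj_as helper):
+#
+#   from pydantic import parse_obj_as
+#   link = parse_obj_as(LinkToFileTypes, {"downloadLink": "https://fakeimg.pl/250x100/"})
+#   assert isinstance(link, DownloadLink)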
diff --git a/packages/models-library/tests/test_projects_pipeline.py b/packages/models-library/tests/test_projects_pipeline.py
index 828e6504ea9..0cbf054eaca 100644
--- a/packages/models-library/tests/test_projects_pipeline.py
+++ b/packages/models-library/tests/test_projects_pipeline.py
@@ -3,16 +3,20 @@
# pylint:disable=redefined-outer-name
from pprint import pformat
+from typing import Dict, Type
import pytest
from models_library.projects_pipeline import ComputationTask
+from pydantic import BaseModel
@pytest.mark.parametrize(
"model_cls",
(ComputationTask,),
)
-def test_computation_task_model_examples(model_cls, model_cls_examples):
+def test_computation_task_model_examples(
+ model_cls: Type[BaseModel], model_cls_examples: Dict[str, Dict]
+):
for name, example in model_cls_examples.items():
print(name, ":", pformat(example))
model_instance = model_cls(**example)
diff --git a/services/web/server/docker/boot.sh b/services/web/server/docker/boot.sh
index f4b3e530423..4f0762a840d 100755
--- a/services/web/server/docker/boot.sh
+++ b/services/web/server/docker/boot.sh
@@ -29,8 +29,10 @@ elif [ "${SC_BUILD_TARGET}" = "production" ]; then
APP_CONFIG=server-docker-prod.yaml
fi
+APP_LOG_LEVEL=${WEBSERVER_LOGLEVEL:-${LOG_LEVEL:-${LOGLEVEL}}}
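+# NOTE: log-level precedence is WEBSERVER_LOGLEVEL, then LOG_LEVEL, then LOGLEVEL;
+# if none is set, APP_LOG_LEVEL stays empty and gunicorn falls back to the per-mode defaults below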
+
# RUNNING application ----------------------------------------
-echo "$INFO" "Selected config $APP_CONFIG"
+echo "$INFO" "Selected config $APP_CONFIG w/ log-level $APP_LOG_LEVEL"
# NOTE: the number of workers ```(2 x $num_cores) + 1``` is
# the official recommendation [https://docs.gunicorn.org/en/latest/design.html#how-many-workers]
@@ -44,7 +46,7 @@ if [ "${SC_BOOT_MODE}" = "debug-ptvsd" ]; then
--worker-class aiohttp.GunicornWebWorker \
--workers="${WEBSERVER_GUNICORN_WORKERS:-1}" \
--name="webserver_$(hostname)_$(date +'%Y-%m-%d_%T')_$$" \
- --log-level="${LOG_LEVEL:-info}" \
+ --log-level="${APP_LOG_LEVEL:-info}" \
--access-logfile='-' \
--access-logformat='%a %t "%r" %s %b [%Dus] "%{Referer}i" "%{User-Agent}i"' \
--reload
@@ -55,7 +57,7 @@ else
--worker-class aiohttp.GunicornWebWorker \
--workers="${WEBSERVER_GUNICORN_WORKERS:-1}" \
--name="webserver_$(hostname)_$(date +'%Y-%m-%d_%T')_$$" \
- --log-level="${LOG_LEVEL:-warning}" \
+ --log-level="${APP_LOG_LEVEL:-warning}" \
--access-logfile='-' \
--access-logformat='%a %t "%r" %s %b [%Dus] "%{Referer}i" "%{User-Agent}i"'
fi
diff --git a/services/web/server/requirements/_base.in b/services/web/server/requirements/_base.in
index 6ef97fb88e6..0001654f78e 100644
--- a/services/web/server/requirements/_base.in
+++ b/services/web/server/requirements/_base.in
@@ -17,67 +17,42 @@
--requirement ../../../../packages/service-library/requirements/_aiohttp.in
-# aiohttp + extensions
-aiohttp
-aiohttp_jinja2
-aiohttp_session[secure]
-aiohttp_security
-aiohttp-swagger[performance]
-gunicorn[setproctitle]
-
-
# web-sockets
# From 5.0.0, https://github.com/miguelgrinberg/python-socketio/blob/main/CHANGES.md
# test_resource_manager.py::test_websocket_resource_management fails because
# socket_id saved in redis does not correspond to client's sio
python-socketio~=4.6.1
-# postgres db
-aiopg[sa]
-asyncpg
-
-# i/o
-aiofiles
-
-# redis
-aioredis
-aioredlock
-
-# RabbitMQ client
-aio-pika
-
-# email
-aiosmtplib
-jinja_app_loader
-
-
-# data models
-pydantic[email]
-
-# security
-cryptography
-passlib
-
-# json
-orjson
-jsondiff
-json2html
-
-# asyncio debug
-aiodebug
+# SEE services/web/server/tests/unit/isolated/test_utils.py::test_yarl_url_compose_changed_with_latest_release
+yarl<1.6
-# misc
+aio-pika # RabbitMQ client
+aiodebug # asyncio debug
+aiofiles # i/o
+aiohttp
+aiohttp_jinja2
+aiohttp_security
+aiohttp_session[secure]
+aiohttp-swagger[performance]
+aiopg[sa] # db
+aioredis # redis
+aioredlock # redis
+aiosmtplib # email
+asyncpg # db
change_case
+cryptography # security
expiringdict
+gunicorn[setproctitle]
+jinja_app_loader # email
+json2html
+jsondiff
+openpyxl # excel
+orjson # json
+parfive # excel
+passlib
+pint # units
+pydantic[email] # models
+python-magic # excel
semantic_version
tenacity
-
-# import/export excel
-parfive
-openpyxl
-python-magic
-
-
-# SEE services/web/server/tests/unit/isolated/test_utils.py::test_yarl_url_compose_changed_with_latest_release
-yarl<1.6
diff --git a/services/web/server/requirements/_base.txt b/services/web/server/requirements/_base.txt
index 0790af9df8e..b04143a064d 100644
--- a/services/web/server/requirements/_base.txt
+++ b/services/web/server/requirements/_base.txt
@@ -191,12 +191,16 @@ openpyxl==3.0.7
# via -r requirements/_base.in
orjson==3.5.3
# via -r requirements/_base.in
+packaging==21.3
+ # via pint
pamqp==2.3.0
# via aiormq
parfive==1.5.1
# via -r requirements/_base.in
passlib==1.7.4
# via -r requirements/_base.in
+pint==0.18
+ # via -r requirements/_base.in
prometheus-client==0.12.0
# via -r requirements/../../../../packages/service-library/requirements/_aiohttp.in
psycopg2-binary==2.9.1
@@ -225,6 +229,8 @@ pyinstrument==3.4.2
# -r requirements/../../../../packages/service-library/requirements/_base.in
pyinstrument-cext==0.2.4
# via pyinstrument
+pyparsing==3.0.7
+ # via packaging
pyrsistent==0.18.0
# via jsonschema
python-engineio==3.14.2
diff --git a/services/web/server/requirements/_test.txt b/services/web/server/requirements/_test.txt
index c4e714de956..bd416327db6 100644
--- a/services/web/server/requirements/_test.txt
+++ b/services/web/server/requirements/_test.txt
@@ -159,6 +159,7 @@ openapi-spec-validator==0.3.1
# -r requirements/_test.in
packaging==21.3
# via
+ # -c requirements/_base.txt
# pytest
# pytest-sugar
paramiko==2.8.0
@@ -185,8 +186,10 @@ pylint==2.12.1
# via -r requirements/_test.in
pynacl==1.4.0
# via paramiko
-pyparsing==3.0.6
- # via packaging
+pyparsing==3.0.7
+ # via
+ # -c requirements/_base.txt
+ # packaging
pyrsistent==0.18.0
# via
# -c requirements/_base.txt
diff --git a/services/web/server/src/simcore_service_webserver/catalog.py b/services/web/server/src/simcore_service_webserver/catalog.py
index 46ee55328bc..b86939f08fb 100644
--- a/services/web/server/src/simcore_service_webserver/catalog.py
+++ b/services/web/server/src/simcore_service_webserver/catalog.py
@@ -6,13 +6,14 @@
from aiohttp import web
from aiohttp.web_routedef import RouteDef
+from pint import UnitRegistry
from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup
from servicelib.aiohttp.rest_routing import iter_path_operations
from . import catalog_handlers
from ._constants import APP_OPENAPI_SPECS_KEY
from .catalog_client import get_services_for_user_in_product, is_service_responsive
-from .catalog_handlers_utils import reverse_proxy_handler
+from .catalog_utils import reverse_proxy_handler
logger = logging.getLogger(__name__)
@@ -48,6 +49,9 @@ def setup_catalog(app: web.Application):
# reverse proxy to catalog's API
app.router.add_routes(routes)
+ # prepares units registry
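+ # (stored as app["UnitRegistry"]; request handlers retrieve it via request.app[UnitRegistry.__name__])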
+ app[UnitRegistry.__name__] = UnitRegistry()
+
__all__: Tuple[str] = (
"get_services_for_user_in_product",
diff --git a/services/web/server/src/simcore_service_webserver/catalog_handlers.py b/services/web/server/src/simcore_service_webserver/catalog_handlers.py
index 1bd1075c226..1a1b5abe771 100644
--- a/services/web/server/src/simcore_service_webserver/catalog_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/catalog_handlers.py
@@ -6,21 +6,23 @@
from aiohttp import web
from aiohttp.web import Request, RouteTableDef
from models_library.services import ServiceInput, ServiceOutput
+from pint import UnitRegistry
from pydantic import ValidationError
from . import catalog_client
from ._constants import RQ_PRODUCT_KEY
from ._meta import api_version_prefix
from .catalog_models import (
- ServiceInputApiOut,
+ ServiceInputGet,
ServiceInputKey,
ServiceKey,
- ServiceOutputApiOut,
+ ServiceOutputGet,
ServiceOutputKey,
ServiceVersion,
json_dumps,
replace_service_input_outputs,
)
+from .catalog_utils import can_connect
from .login.decorators import RQT_USERID_KEY, login_required
from .rest_constants import RESPONSE_MODEL_POLICY
from .security_decorators import permission_required
@@ -44,6 +46,7 @@ class _RequestContext:
app: web.Application
user_id: int
product_name: str
+ unit_registry: UnitRegistry
@classmethod
def create(cls, request: Request) -> "_RequestContext":
@@ -51,6 +54,7 @@ def create(cls, request: Request) -> "_RequestContext":
app=request.app,
user_id=request[RQT_USERID_KEY],
product_name=request[RQ_PRODUCT_KEY],
+ unit_registry=request.app[UnitRegistry.__name__],
)
@@ -280,56 +284,14 @@ async def get_compatible_outputs_given_target_input_handler(request: Request):
#
-def can_connect(
- from_output: ServiceOutput, to_input: ServiceInput, *, strict: bool = False
-) -> bool:
- # FIXME: can_connect is a very very draft version
-
- # compatible units
- ok = from_output.unit == to_input.unit
- if ok:
- # compatible types
- # FIXME: see mimetypes examples in property_type
- #
- # "pattern": "^(number|integer|boolean|string|data:([^/\\s,]+/[^/\\s,]+|\\[[^/\\s,]+/[^/\\s,]+(,[^/\\s]+/[^/,\\s]+)*\\]))$",
- # "description": "data type expected on this input glob matching for data type is allowed",
- # "examples": [
- # "number",
- # "boolean",
- # "data:*/*",
- # "data:text/*",
- # "data:[image/jpeg,image/png]",
- # "data:application/json",
- # "data:application/json;schema=https://my-schema/not/really/schema.json",
- # "data:application/vnd.ms-excel",
- # "data:text/plain",
- # "data:application/hdf5",
- # "data:application/edu.ucdavis@ceclancy.xyz"
- #
- ok = from_output.property_type == to_input.property_type
- if not ok:
- ok = (
- # data: -> data:*/*
- to_input.property_type == "data:*/*"
- and from_output.property_type.startswith("data:")
- )
-
- if not strict:
- # NOTE: by default, this is allowed in the UI but not in a more strict plausibility check
- # data:*/* -> data:
- ok |= (
- from_output.property_type == "data:*/*"
- and to_input.property_type.startswith("data:")
- )
- return ok
-
-
async def list_services(ctx: _RequestContext):
services = await catalog_client.get_services_for_user_in_product(
ctx.app, ctx.user_id, ctx.product_name, only_key_versions=False
)
for service in services:
- replace_service_input_outputs(service, **RESPONSE_MODEL_POLICY)
+ replace_service_input_outputs(
+ service, unit_registry=ctx.unit_registry, **RESPONSE_MODEL_POLICY
+ )
return services
@@ -339,7 +301,9 @@ async def get_service(
service = await catalog_client.get_service(
ctx.app, ctx.user_id, service_key, service_version, ctx.product_name
)
- replace_service_input_outputs(service, **RESPONSE_MODEL_POLICY)
+ replace_service_input_outputs(
+ service, unit_registry=ctx.unit_registry, **RESPONSE_MODEL_POLICY
+ )
return service
@@ -357,21 +321,24 @@ async def update_service(
ctx.product_name,
update_data,
)
- replace_service_input_outputs(service, **RESPONSE_MODEL_POLICY)
+ replace_service_input_outputs(
+ service, unit_registry=ctx.unit_registry, **RESPONSE_MODEL_POLICY
+ )
return service
async def list_service_inputs(
service_key: ServiceKey, service_version: ServiceVersion, ctx: _RequestContext
-) -> List[ServiceOutputApiOut]:
+) -> List[ServiceInputGet]:
service = await catalog_client.get_service(
ctx.app, ctx.user_id, service_key, service_version, ctx.product_name
)
-
inputs = []
for input_key in service["inputs"].keys():
- service_input = ServiceInputApiOut.from_catalog_service(service, input_key)
+ service_input = ServiceInputGet.from_catalog_service_api_model(
+ service, input_key
+ )
inputs.append(service_input)
return inputs
@@ -381,12 +348,12 @@ async def get_service_input(
service_version: ServiceVersion,
input_key: ServiceInputKey,
ctx: _RequestContext,
-) -> ServiceInputApiOut:
+) -> ServiceInputGet:
service = await catalog_client.get_service(
ctx.app, ctx.user_id, service_key, service_version, ctx.product_name
)
- service_input = ServiceInputApiOut.from_catalog_service(service, input_key)
+ service_input = ServiceInputGet.from_catalog_service_api_model(service, input_key)
return service_input
@@ -427,7 +394,7 @@ def iter_service_inputs() -> Iterator[Tuple[ServiceInputKey, ServiceInput]]:
# check
matches = []
for key_id, to_input in iter_service_inputs():
- if can_connect(from_output, to_input):
+ if can_connect(from_output, to_input, units_registry=ctx.unit_registry):
matches.append(key_id)
return matches
@@ -437,14 +404,16 @@ async def list_service_outputs(
service_key: ServiceKey,
service_version: ServiceVersion,
ctx: _RequestContext,
-) -> List[ServiceOutputApiOut]:
+) -> List[ServiceOutputGet]:
service = await catalog_client.get_service(
ctx.app, ctx.user_id, service_key, service_version, ctx.product_name
)
outputs = []
for output_key in service["outputs"].keys():
- service_output = ServiceOutputApiOut.from_catalog_service(service, output_key)
+ service_output = ServiceOutputGet.from_catalog_service_api_model(
+ service, output_key
+ )
outputs.append(service_output)
return outputs
@@ -454,11 +423,13 @@ async def get_service_output(
service_version: ServiceVersion,
output_key: ServiceOutputKey,
ctx: _RequestContext,
-) -> ServiceOutputApiOut:
+) -> ServiceOutputGet:
service = await catalog_client.get_service(
ctx.app, ctx.user_id, service_key, service_version, ctx.product_name
)
- service_output = ServiceOutputApiOut.from_catalog_service(service, output_key)
+ service_output = ServiceOutputGet.from_catalog_service_api_model(
+ service, output_key
+ )
return service_output
@@ -492,7 +463,7 @@ def iter_service_outputs() -> Iterator[Tuple[ServiceOutputKey, ServiceOutput]]:
# check
matches = []
for key_id, from_output in iter_service_outputs():
- if can_connect(from_output, to_input):
+ if can_connect(from_output, to_input, units_registry=ctx.unit_registry):
matches.append(key_id)
return matches
diff --git a/services/web/server/src/simcore_service_webserver/catalog_handlers_utils.py b/services/web/server/src/simcore_service_webserver/catalog_handlers_utils.py
deleted file mode 100644
index 131ea6736ba..00000000000
--- a/services/web/server/src/simcore_service_webserver/catalog_handlers_utils.py
+++ /dev/null
@@ -1,54 +0,0 @@
-import logging
-from typing import Optional
-
-from aiohttp import web
-from yarl import URL
-
-from . import catalog_client
-from ._constants import RQ_PRODUCT_KEY, X_PRODUCT_NAME_HEADER
-from .catalog_client import to_backend_service
-from .catalog_settings import get_plugin_settings
-from .login.decorators import RQT_USERID_KEY, login_required
-from .security_decorators import permission_required
-
-logger = logging.getLogger(__name__)
-
-
-@login_required
-@permission_required("services.catalog.*")
-async def reverse_proxy_handler(request: web.Request) -> web.Response:
- """
- - Adds auth layer
- - Adds access layer
- - Forwards request to catalog service
-
- SEE https://gist.github.com/barrachri/32f865c4705f27e75d3b8530180589fb
- """
- user_id = request[RQT_USERID_KEY]
- settings = get_plugin_settings(request.app)
-
- # path & queries
- backend_url = to_backend_service(
- request.rel_url,
- URL(settings.base_url),
- settings.CATALOG_VTAG,
- )
- # FIXME: hack
- if "/services" in backend_url.path:
- backend_url = backend_url.update_query({"user_id": user_id})
- logger.debug("Redirecting '%s' -> '%s'", request.url, backend_url)
-
- # body
- raw: Optional[bytes] = None
- if request.can_read_body:
- raw = await request.read()
-
- # injects product discovered by middleware in headers
- fwd_headers = request.headers.copy()
- product_name = request[RQ_PRODUCT_KEY]
- fwd_headers.update({X_PRODUCT_NAME_HEADER: product_name})
-
- # forward request
- return await catalog_client.make_request_and_envelope_response(
- request.app, request.method, backend_url, fwd_headers, raw
- )
diff --git a/services/web/server/src/simcore_service_webserver/catalog_models.py b/services/web/server/src/simcore_service_webserver/catalog_models.py
index df498b72d1d..b7af15161a3 100644
--- a/services/web/server/src/simcore_service_webserver/catalog_models.py
+++ b/services/web/server/src/simcore_service_webserver/catalog_models.py
@@ -8,9 +8,11 @@
ServiceInput,
ServiceOutput,
)
+from pint import UnitRegistry
from pydantic import Extra, Field, constr
from pydantic.main import BaseModel
+from .catalog_utils import UnitHtmlFormat, get_html_formatted_unit
from .utils import snake_to_camel
ServiceKey = constr(regex=KEY_RE)
@@ -19,24 +21,6 @@
ServiceOutputKey = PropertyName
-# TODO: will be replaced by pynt functionality
-FAKE_UNIT_TO_FORMATS = {"SECOND": ("s", "seconds"), "METER": ("m", "meters")}
-
-
-class CannotFormatUnitError(ValueError):
- """Either unit is not provided or is invalid or is not registered"""
-
-
-def get_formatted_unit(data: dict):
- try:
- unit = data["unit"]
- if unit is None:
- raise CannotFormatUnitError()
- return FAKE_UNIT_TO_FORMATS[unit.upper()]
- except KeyError as err:
- raise CannotFormatUnitError() from err
-
-
def json_dumps(v, *, default=None) -> str:
# orjson.dumps returns bytes, to match standard json.dumps we need to decode
return orjson.dumps(v, default=default).decode()
@@ -56,10 +40,12 @@ def json_dumps(v, *, default=None) -> str:
# TODO: reduce to a minimum returned input/output models (ask OM)
class _BaseCommonApiExtension(BaseModel):
unit_long: Optional[str] = Field(
- None, description="Long name of the unit, if available"
+ None,
+ description="Long name of the unit for display (html-compatible), if available",
)
unit_short: Optional[str] = Field(
- None, description="Short name for the unit, if available"
+ None,
+ description="Short name for the unit for display (html-compatible), if available",
)
class Config:
@@ -69,7 +55,7 @@ class Config:
json_dumps = json_dumps
-class ServiceInputApiOut(ServiceInput, _BaseCommonApiExtension):
+class ServiceInputGet(ServiceInput, _BaseCommonApiExtension):
key_id: ServiceInputKey = Field(
..., description="Unique name identifier for this input"
)
@@ -87,20 +73,48 @@ class Config(_BaseCommonApiExtension.Config):
"keyId": "input_2",
"unitLong": "seconds",
"unitShort": "sec",
- }
+ },
+ "examples": [
+ # uses content-schema
+ {
+ "label": "Acceleration",
+ "description": "acceleration with units",
+ "type": "ref_contentSchema",
+ "contentSchema": {
+ "title": "Acceleration",
+ "type": "number",
+ "x_unit": "m/s**2",
+ },
+ "keyId": "input_1",
+ "unitLong": "meter/second3",
+ "unitShort": "m/s3",
+ }
+ ],
}
@classmethod
- def from_catalog_service(cls, service: Dict[str, Any], input_key: ServiceInputKey):
+ def from_catalog_service_api_model(
+ cls,
+ service: Dict[str, Any],
+ input_key: ServiceInputKey,
+ ureg: Optional[UnitRegistry] = None,
+ ):
data = service["inputs"][input_key]
- try:
- ushort, ulong = get_formatted_unit(data)
- return cls(keyId=input_key, unitLong=ulong, unitShort=ushort, **data)
- except CannotFormatUnitError:
- return cls(keyId=input_key, **data)
+ port = cls(keyId=input_key, **data) # validated!
+ unit_html: UnitHtmlFormat
+
+ if ureg and (unit_html := get_html_formatted_unit(port, ureg)):
+ # we know data is ok since it was validated above
+ return cls.construct(
+ keyId=input_key,
+ unitLong=unit_html.long,
+ unitShort=unit_html.short,
+ **data,
+ )
+ return port
-class ServiceOutputApiOut(ServiceOutput, _BaseCommonApiExtension):
+class ServiceOutputGet(ServiceOutput, _BaseCommonApiExtension):
key_id: ServiceOutputKey = Field(
..., description="Unique name identifier for this input"
)
@@ -120,21 +134,30 @@ class Config(_BaseCommonApiExtension.Config):
}
@classmethod
- def from_catalog_service(
- cls, service: Dict[str, Any], output_key: ServiceOutputKey
+ def from_catalog_service_api_model(
+ cls,
+ service: Dict[str, Any],
+ output_key: ServiceOutputKey,
+ ureg: Optional[UnitRegistry] = None,
):
data = service["outputs"][output_key]
+ # NOTE: prunes invalid field that might have remained in database
+ # TODO: remove from root and remove this cleanup operation
+ if "defaultValue" in data:
+ data.pop("defaultValue")
- try:
- # NOTE: prunes invalid field that might have remained in database
- # TODO: remove from root and remove this cleanup operation
- if "defaultValue" in data:
- data.pop("defaultValue")
+ port = cls(keyId=output_key, **data) # validated
- ushort, ulong = get_formatted_unit(data)
- return cls(keyId=output_key, unitLong=ulong, unitShort=ushort, **data)
- except CannotFormatUnitError:
- return cls(keyId=output_key, **data)
+ unit_html: UnitHtmlFormat
+ if ureg and (unit_html := get_html_formatted_unit(port, ureg)):
+ # we know data is ok since it was validated above
+ return cls.construct(
+ keyId=output_key,
+ unitLong=unit_html.long,
+ unitShort=unit_html.short,
+ **data,
+ )
+ return port
#######################
@@ -142,14 +165,23 @@ def from_catalog_service(
#
-def replace_service_input_outputs(service: Dict[str, Any], **export_options):
+def replace_service_input_outputs(
+ service: Dict[str, Any],
+ *,
+ unit_registry: Optional[UnitRegistry] = None,
+ **export_options,
+):
"""Thin wrapper to replace i/o ports in returned service model"""
# This is a fast solution until proper models are available for the web API
for input_key in service["inputs"]:
- new_input = ServiceInputApiOut.from_catalog_service(service, input_key)
+ new_input = ServiceInputGet.from_catalog_service_api_model(
+ service, input_key, unit_registry
+ )
service["inputs"][input_key] = new_input.dict(**export_options)
for output_key in service["outputs"]:
- new_output = ServiceOutputApiOut.from_catalog_service(service, output_key)
+ new_output = ServiceOutputGet.from_catalog_service_api_model(
+ service, output_key, unit_registry
+ )
service["outputs"][output_key] = new_output.dict(**export_options)
diff --git a/services/web/server/src/simcore_service_webserver/catalog_utils.py b/services/web/server/src/simcore_service_webserver/catalog_utils.py
new file mode 100644
index 00000000000..bab64ce5d02
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/catalog_utils.py
@@ -0,0 +1,177 @@
+import logging
+from dataclasses import dataclass
+from typing import Optional
+
+from aiohttp import web
+from models_library.services import BaseServiceIOModel, ServiceInput, ServiceOutput
+from pint import PintError, UnitRegistry
+from yarl import URL
+
+from . import catalog_client
+from ._constants import RQ_PRODUCT_KEY, X_PRODUCT_NAME_HEADER
+from .catalog_client import to_backend_service
+from .catalog_settings import get_plugin_settings
+from .login.decorators import RQT_USERID_KEY, login_required
+from .security_decorators import permission_required
+
+logger = logging.getLogger(__name__)
+
+
+@login_required
+@permission_required("services.catalog.*")
+async def reverse_proxy_handler(request: web.Request) -> web.Response:
+ """
+ - Adds auth layer
+ - Adds access layer
+ - Forwards request to catalog service
+
+ SEE https://gist.github.com/barrachri/32f865c4705f27e75d3b8530180589fb
+ """
+ user_id = request[RQT_USERID_KEY]
+ settings = get_plugin_settings(request.app)
+
+ # path & queries
+ backend_url = to_backend_service(
+ request.rel_url,
+ URL(settings.base_url),
+ settings.CATALOG_VTAG,
+ )
+ # FIXME: hack
+ if "/services" in backend_url.path:
+ backend_url = backend_url.update_query({"user_id": user_id})
+ logger.debug("Redirecting '%s' -> '%s'", request.url, backend_url)
+
+ # body
+ raw: Optional[bytes] = None
+ if request.can_read_body:
+ raw = await request.read()
+
+ # injects product discovered by middleware in headers
+ fwd_headers = request.headers.copy()
+ product_name = request[RQ_PRODUCT_KEY]
+ fwd_headers.update({X_PRODUCT_NAME_HEADER: product_name})
+
+ # forward request
+ return await catalog_client.make_request_and_envelope_response(
+ request.app, request.method, backend_url, fwd_headers, raw
+ )
+
+
+## MODELS UTILS ---------------------------------
+
+
+def _get_unit_name(port: BaseServiceIOModel) -> str:
+ unit = port.unit
+ if port.property_type == "ref_contentSchema":
+ assert port.content_schema is not None # nosec
+ unit = port.content_schema.get("x_unit", unit)
+ if unit:
+ # TODO: review this convention while still under development: x_unit
+ # uses a special format for prefixes; temporary direct replacement here
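+ # e.g. the x_unit "milli-meter" used by demo_units becomes "millimeter",
+ # which pint can then parse directly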
+ unit = unit.replace("-", "")
+ elif port.content_schema["type"] in ("object", "array"):
+ # these objects might have units in their fields
+ raise NotImplementedError
+ return unit
+
+
+def _get_type_name(port: BaseServiceIOModel) -> str:
+ _type = port.property_type
+ if port.property_type == "ref_contentSchema":
+ assert port.content_schema is not None # nosec
+ _type = port.content_schema["type"]
+ return _type
+
+
+@dataclass
+class UnitHtmlFormat:
+ short: str
+ long: str
+
+
+def get_html_formatted_unit(
+ port: BaseServiceIOModel, ureg: UnitRegistry
+) -> Optional[UnitHtmlFormat]:
+ try:
+ unit_name = _get_unit_name(port)
+ if unit_name is None:
+ return None
+
+ q = ureg.Quantity(unit_name)
+ return UnitHtmlFormat(short=f"{q.units:~H}", long=f"{q.units:H}")
+ except (PintError, NotImplementedError):
+ return None
+
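+# Sketch of the expected behaviour (assuming pint's default registry):
+#   a port whose unit resolves to "minute" yields UnitHtmlFormat(short="min", long="minute"),
+#   while ports without a parsable unit yield None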
+
+## PORT COMPATIBILITY ---------------------------------
+
+
+def _can_convert_units(from_unit: str, to_unit: str, ureg: UnitRegistry) -> bool:
+ assert from_unit # nosec
+ assert to_unit # nosec
+
+ # TODO: optimize by caching? ureg already caches?
+ # TODO: symmetric
+ try:
+ return ureg.Quantity(from_unit).check(to_unit)
+ except (TypeError, PintError):
+ return False
+
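+# e.g. (sketch, using the cases exercised in the tests): _can_convert_units("cm", "mm", ureg) -> True,
+# whereas _can_convert_units("cm", "degrees", ureg) -> False (incompatible dimensionality)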
+
+def can_connect(
+ from_output: ServiceOutput,
+ to_input: ServiceInput,
+ units_registry: UnitRegistry,
+) -> bool:
+ """Checks compatibility between ports.
+
+ This check IS PERMISSIVE and is used by the UI, where some "flexibility" is needed because:
+ - it has to be a fast evaluation
+ - there are no error messages when the check fails
+ - some configurations might need several UI steps before they become valid
+
+ For more rigorous plausibility checks, a stricter variant would be needed.
+ """
+ # types check
+ from_type = _get_type_name(from_output)
+ to_type = _get_type_name(to_input)
+
+ ok = (
+ from_type == to_type
+ or (
+ # data: -> data:*/*
+ to_type == "data:*/*"
+ and from_type.startswith("data:")
+ )
+ or (
+ # NOTE: by default, this is allowed in the UI but not in a more strict plausibility check
+ # data:*/* -> data:
+ from_type == "data:*/*"
+ and to_type.startswith("data:")
+ )
+ )
+
+ if any(t in ("object", "array") for t in (from_type, to_type)):
+ # Not implemented yet, but if e.g. from_type == to_type, that should be the answer
+ # TODO: checking that from_type is a subset of to_type is the right way to resolve this
+ return ok
+
+ # units check
+ if ok:
+ try:
+ from_unit = _get_unit_name(from_output)
+ to_unit = _get_unit_name(to_input)
+ except NotImplementedError:
+ return ok
+
+ ok = ok and (
+ from_unit == to_unit
+ # unitless -> *
+ or from_unit is None
+ # * -> unitless
+ or to_unit is None
+ # from_unit -> unit
+ or _can_convert_units(from_unit, to_unit, units_registry)
+ )
+
+ return ok
diff --git a/services/web/server/tests/unit/isolated/test_catalog_handlers.py b/services/web/server/tests/unit/isolated/test_catalog_handlers.py
deleted file mode 100644
index d1fd071f94c..00000000000
--- a/services/web/server/tests/unit/isolated/test_catalog_handlers.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from models_library.services import ServiceInput, ServiceOutput
-from simcore_service_webserver.catalog_handlers import can_connect
-
-
-def test_can_connect():
- # Reproduces https://github.com/ITISFoundation/osparc-issues/issues/442
- file_picker_outfile = {
- "displayOrder": 2,
- "label": "File Picker",
- "description": "Picker",
- "type": "data:*/*",
- }
-
- input_sleeper_input_1 = {
- "displayOrder": 1,
- "label": "Sleeper",
- "description": "sleeper input file",
- "type": "data:text/plain",
- }
-
- # data:*/* -> data:text/plain
- assert can_connect(
- from_output=ServiceOutput.parse_obj(file_picker_outfile),
- to_input=ServiceInput.parse_obj(input_sleeper_input_1),
- )
- assert not can_connect(
- from_output=ServiceOutput.parse_obj(file_picker_outfile),
- to_input=ServiceInput.parse_obj(input_sleeper_input_1),
- strict=True,
- )
-
- # data:text/plain -> data:*/*
- assert can_connect(
- from_output=ServiceOutput.parse_obj(input_sleeper_input_1),
- to_input=ServiceInput.parse_obj(file_picker_outfile),
- )
- assert can_connect(
- from_output=ServiceOutput.parse_obj(input_sleeper_input_1),
- to_input=ServiceInput.parse_obj(file_picker_outfile),
- strict=True,
- )
diff --git a/services/web/server/tests/unit/isolated/test_catalog_models.py b/services/web/server/tests/unit/isolated/test_catalog_models.py
index bdc30b9e544..c9e78449db3 100644
--- a/services/web/server/tests/unit/isolated/test_catalog_models.py
+++ b/services/web/server/tests/unit/isolated/test_catalog_models.py
@@ -1,29 +1,38 @@
-# pylint:disable=unused-argument
-# pylint:disable=redefined-outer-name
-# pylint:disable=no-name-in-module
-
+# pylint: disable=redefined-outer-name
+# pylint: disable=unused-argument
+# pylint: disable=unused-variable
import json
from copy import deepcopy
from pprint import pformat
+from typing import Any, Dict, Mapping, Type
import pytest
+from pint import UnitRegistry
+from pydantic import BaseModel
from simcore_service_webserver.catalog_handlers import RESPONSE_MODEL_POLICY
from simcore_service_webserver.catalog_models import (
- ServiceInputApiOut,
- ServiceOutputApiOut,
+ ServiceInputGet,
+ ServiceOutputGet,
replace_service_input_outputs,
)
+@pytest.fixture(scope="module")
+def unit_registry():
+ return UnitRegistry()
+
+
@pytest.mark.parametrize(
"model_cls",
(
- ServiceInputApiOut,
- ServiceOutputApiOut,
+ ServiceInputGet,
+ ServiceOutputGet,
),
)
-def test_webserver_catalog_api_models(model_cls, model_cls_examples):
+def test_webserver_catalog_api_models(
+ model_cls: Type[BaseModel], model_cls_examples: Dict[str, Mapping[str, Any]]
+):
for name, example in model_cls_examples.items():
print(name, ":", pformat(example))
model_instance = model_cls(**example)
@@ -34,7 +43,7 @@ def test_webserver_catalog_api_models(model_cls, model_cls_examples):
assert model_cls(**data) == model_instance
-def test_from_catalog_to_webapi_service():
+def test_from_catalog_to_webapi_service(unit_registry: UnitRegistry):
# Taken from services/catalog/src/simcore_service_catalog/models/schemas/services.py on Feb.2021
catalog_service = {
@@ -73,7 +82,7 @@ def test_from_catalog_to_webapi_service():
"outFile": {
"displayOrder": 0,
"label": "File",
- "unit": "second",
+ "unit": "sec",
"description": "Chosen File",
"type": "data:*/*",
"fileToKeyMap": None,
@@ -85,14 +94,16 @@ def test_from_catalog_to_webapi_service():
}
webapi_service = deepcopy(catalog_service)
- replace_service_input_outputs(webapi_service, **RESPONSE_MODEL_POLICY)
+ replace_service_input_outputs(
+ webapi_service, unit_registry=unit_registry, **RESPONSE_MODEL_POLICY
+ )
print(json.dumps(webapi_service, indent=2))
# If units are defined, I want unitShort and unitLong
- assert webapi_service["outputs"]["outFile"]["unit"] is "second"
- assert webapi_service["outputs"]["outFile"]["unitShort"] is "s"
- assert webapi_service["outputs"]["outFile"]["unitLong"] is "seconds"
+ assert webapi_service["outputs"]["outFile"]["unit"] == "sec"
+ assert webapi_service["outputs"]["outFile"]["unitShort"] == "s"
+ assert webapi_service["outputs"]["outFile"]["unitLong"] == "second"
# if units are NOT defined => must NOT set Long/Short units
fields = set(webapi_service["inputs"]["uno"].keys())
diff --git a/services/web/server/tests/unit/isolated/test_catalog_utils.py b/services/web/server/tests/unit/isolated/test_catalog_utils.py
new file mode 100644
index 00000000000..01da0148a8e
--- /dev/null
+++ b/services/web/server/tests/unit/isolated/test_catalog_utils.py
@@ -0,0 +1,216 @@
+# pylint: disable=redefined-outer-name
+# pylint: disable=unused-argument
+# pylint: disable=unused-variable
+
+import itertools
+from typing import Any, Dict
+
+import pytest
+from models_library.function_service import demo_units
+from models_library.services import ServiceInput, ServiceOutput
+from pint import UnitRegistry
+from pydantic import Field, create_model
+from simcore_service_webserver.catalog_utils import can_connect
+
+# HELPERS ----------
+
+
+def create_port_data(schema: Dict[str, Any]):
+ description = schema.pop("description", schema["title"])
+
+ return {
+ "label": schema["title"],
+ "description": description,
+ "type": "ref_contentSchema",
+ "contentSchema": schema,
+ }
+
+
+def upgrade_port_data(old_port) -> Dict[str, Any]:
+ _type = old_port["type"]
+ if _type in ("number", "integer", "string"):
+ # creates schema from old data
+ title = old_port["label"].upper()
+ field_kwargs = {"description": old_port["description"]}
+ if unit := old_port.get("unit"):
+ field_kwargs["x_unit"] = unit
+ python_type = {"number": float, "integer": int, "string": str}
+ schema = create_model(
+ title, __root__=(python_type[_type], Field(..., **field_kwargs))
+ ).schema()
+ return create_port_data(schema)
+ return old_port
+
+
+# FIXTURES -----------------
+
+
+@pytest.fixture(scope="module")
+def unit_registry():
+ return UnitRegistry()
+
+
+# TESTS -----------------
+
+
+def test_can_connect_for_gh_osparc_issues_442(unit_registry: UnitRegistry):
+ # Reproduces https://github.com/ITISFoundation/osparc-issues/issues/442
+ file_picker_outfile = {
+ "displayOrder": 2,
+ "label": "File Picker",
+ "description": "Picker",
+ "type": "data:*/*",
+ }
+
+ input_sleeper_input_1 = {
+ "displayOrder": 1,
+ "label": "Sleeper",
+ "description": "sleeper input file",
+ "type": "data:text/plain",
+ }
+
+ # data:*/* -> data:text/plain
+ assert can_connect(
+ from_output=ServiceOutput.parse_obj(file_picker_outfile),
+ to_input=ServiceInput.parse_obj(input_sleeper_input_1),
+ units_registry=unit_registry,
+ )
+
+ # data:text/plain -> data:*/*
+ assert can_connect(
+ from_output=ServiceOutput.parse_obj(input_sleeper_input_1),
+ to_input=ServiceInput.parse_obj(file_picker_outfile),
+ units_registry=unit_registry,
+ )
+
+
+PORTS_WITHOUT_UNITS = [
+ {
+ "label": "port_W/O_old",
+ "description": "output w/o unit old format",
+ "type": "integer",
+ },
+ create_port_data(
+ {
+ "title": "port-W/O",
+ "description": "output w/o unit",
+ "type": "integer",
+ }
+ ),
+]
+
+PORTS_WITH_UNITS = [
+ {
+ "label": "port_W/_old",
+ "description": "port w/ unit old format",
+ "type": "integer",
+ "unit": "m", # <---
+ },
+ create_port_data(
+ {
+ "title": "port-W/",
+ "description": "port w/ unit",
+ "type": "integer",
+ "x_unit": "cm", # <---
+ }
+ ),
+]
+
+
+@pytest.mark.parametrize(
+ "port_without_unit, port_with_unit",
+ itertools.product(PORTS_WITHOUT_UNITS, PORTS_WITH_UNITS),
+ ids=lambda l: l["label"],
+)
+def test_can_connect_no_units_with_units(
+ port_without_unit, port_with_unit, unit_registry: UnitRegistry
+):
+ # w/o -> w
+ assert can_connect(
+ from_output=ServiceOutput.parse_obj(port_without_unit),
+ to_input=ServiceInput.parse_obj(port_with_unit),
+ units_registry=unit_registry,
+ )
+
+ # w -> w/o
+ assert can_connect(
+ from_output=ServiceOutput.parse_obj(port_with_unit),
+ to_input=ServiceInput.parse_obj(port_without_unit),
+ units_registry=unit_registry,
+ )
+
+
+@pytest.mark.parametrize(
+ "from_unit, to_unit, are_compatible",
+ [
+ ("cm", "mm", True),
+ ("m", "cm", True),
+ ("cm", "miles", True),
+ ("foot", "cm", True),
+ ("cm", "degrees", False),
+ ("cm", None, True),
+ (None, "cm", True),
+ ],
+)
+def test_units_compatible(
+ from_unit, to_unit, are_compatible, unit_registry: UnitRegistry
+):
+ #
+ # TODO: does it make sense to have a string or bool with x_unit??
+ #
+
+ from_port = create_port_data(
+ {
+ "title": "src",
+ "description": "source port",
+ "type": "number",
+ "x_unit": from_unit,
+ }
+ )
+ to_port = create_port_data(
+ {
+ "title": "dst",
+ "description": "destination port",
+ "type": "number",
+ "x_unit": to_unit,
+ }
+ )
+
+ assert (
+ can_connect(
+ from_output=ServiceOutput.parse_obj(from_port),
+ to_input=ServiceInput.parse_obj(to_port),
+ units_registry=unit_registry,
+ )
+ == are_compatible
+ )
+
+
+@pytest.mark.parametrize(
+ "from_port,to_port",
+ itertools.product(
+ demo_units.META.outputs.values(), demo_units.META.inputs.values()
+ ),
+ ids=lambda p: p.label,
+)
+def test_can_connect_with_units(
+ from_port: ServiceOutput, to_port: ServiceInput, unit_registry: UnitRegistry
+):
+ # WARNING: assumes the following convention for the fixture data:
+ # - two ports are compatible if they have the same title
+ #
+ # NOTE: this assumption will probably break when the demo_units service
+ # is modified. At that point, please create a fixture in this test-suite
+ # and copy&paste inputs/outputs above
+ are_compatible = (
+ from_port.content_schema["title"] == to_port.content_schema["title"]
+ )
+
+ assert (
+ can_connect(
+ from_output=from_port,
+ to_input=to_port,
+ units_registry=unit_registry,
+ )
+ == are_compatible
+ )