@distributed_trace_async
async def get_sandbox_file(
    self, *, pfn: str, **kwargs: Any
) -> _models.SandboxDownloadResponse:
    """Get Sandbox File.

    Get a presigned URL to download a sandbox file.

    This route cannot use a redirect response because most clients will also
    send the authorization header when following a redirect. This is not
    desirable as it would leak the authorization token to the storage backend.
    Additionally, most storage backends return an error when they receive an
    authorization header for a presigned URL.

    :keyword pfn: Required.
    :paramtype pfn: str
    :return: SandboxDownloadResponse
    :rtype: ~client.models.SandboxDownloadResponse
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    # HTTP header names are case-insensitive; use the same case-insensitive
    # mapping as initiate_sandbox_upload instead of a plain dict.
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = kwargs.pop("params", {}) or {}

    cls: ClsType[_models.SandboxDownloadResponse] = kwargs.pop("cls", None)

    request = build_jobs_get_sandbox_file_request(
        pfn=pfn,
        headers=_headers,
        params=_params,
    )
    request.url = self._client.format_url(request.url)

    _stream = False
    pipeline_response: PipelineResponse = (
        await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
    )

    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(
            status_code=response.status_code, response=response, error_map=error_map
        )
        raise HttpResponseError(response=response)

    deserialized = self._deserialize("SandboxDownloadResponse", pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

@overload
async def initiate_sandbox_upload(
    self,
    body: _models.SandboxInfo,
    *,
    content_type: str = "application/json",
    **kwargs: Any
) -> _models.SandboxUploadResponse:
    """Initiate Sandbox Upload.

    Get the PFN for the given sandbox, initiate an upload as required.

    If the sandbox already exists in the database then the PFN is returned
    and there is no "url" field in the response.

    If the sandbox does not exist in the database then the "url" and "fields"
    should be used to upload the sandbox to the storage backend.

    :param body: Required.
    :type body: ~client.models.SandboxInfo
    :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
     Default value is "application/json".
    :paramtype content_type: str
    :return: SandboxUploadResponse
    :rtype: ~client.models.SandboxUploadResponse
    :raises ~azure.core.exceptions.HttpResponseError:
    """

@overload
async def initiate_sandbox_upload(
    self, body: IO, *, content_type: str = "application/json", **kwargs: Any
) -> _models.SandboxUploadResponse:
    """Initiate Sandbox Upload.

    Get the PFN for the given sandbox, initiate an upload as required.

    If the sandbox already exists in the database then the PFN is returned
    and there is no "url" field in the response.

    If the sandbox does not exist in the database then the "url" and "fields"
    should be used to upload the sandbox to the storage backend.

    :param body: Required.
    :type body: IO
    :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
     Default value is "application/json".
    :paramtype content_type: str
    :return: SandboxUploadResponse
    :rtype: ~client.models.SandboxUploadResponse
    :raises ~azure.core.exceptions.HttpResponseError:
    """

@distributed_trace_async
async def initiate_sandbox_upload(
    self, body: Union[_models.SandboxInfo, IO], **kwargs: Any
) -> _models.SandboxUploadResponse:
    """Initiate Sandbox Upload.

    Get the PFN for the given sandbox, initiate an upload as required.

    If the sandbox already exists in the database then the PFN is returned
    and there is no "url" field in the response.

    If the sandbox does not exist in the database then the "url" and "fields"
    should be used to upload the sandbox to the storage backend.

    :param body: Is either a SandboxInfo type or a IO type. Required.
    :type body: ~client.models.SandboxInfo or IO
    :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
     Default value is None.
    :paramtype content_type: str
    :return: SandboxUploadResponse
    :rtype: ~client.models.SandboxUploadResponse
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = kwargs.pop("params", {}) or {}

    content_type: Optional[str] = kwargs.pop(
        "content_type", _headers.pop("Content-Type", None)
    )
    cls: ClsType[_models.SandboxUploadResponse] = kwargs.pop("cls", None)

    content_type = content_type or "application/json"
    _json = None
    _content = None
    # A raw stream/bytes body is passed through untouched; a SandboxInfo
    # model is serialized to JSON.
    if isinstance(body, (IOBase, bytes)):
        _content = body
    else:
        _json = self._serialize.body(body, "SandboxInfo")

    request = build_jobs_initiate_sandbox_upload_request(
        content_type=content_type,
        json=_json,
        content=_content,
        headers=_headers,
        params=_params,
    )
    request.url = self._client.format_url(request.url)

    _stream = False
    pipeline_response: PipelineResponse = (
        await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
    )

    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(
            status_code=response.status_code, response=response, error_map=error_map
        )
        raise HttpResponseError(response=response)

    deserialized = self._deserialize("SandboxUploadResponse", pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
class ChecksumAlgorithm(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Checksum algorithms accepted for sandbox files."""

    SHA256 = "sha256"


class Enum10(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Enum10."""

    ASC = "asc"


class Enum11(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Enum11."""

    DSC = "dsc"


class SandboxFormat(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Archive formats accepted for sandbox files."""

    TAR_BZ2 = "tar.bz2"


class SandboxDownloadResponse(_serialization.Model):
    """Presigned-URL response for downloading a sandbox.

    All required parameters must be populated in order to send to Azure.

    :ivar url: Presigned download URL. Required.
    :vartype url: str
    :ivar expires_in: Lifetime of the presigned URL, in seconds. Required.
    :vartype expires_in: int
    """

    _validation = {
        "url": {"required": True},
        "expires_in": {"required": True},
    }

    _attribute_map = {
        "url": {"key": "url", "type": "str"},
        "expires_in": {"key": "expires_in", "type": "int"},
    }

    def __init__(self, *, url: str, expires_in: int, **kwargs: Any) -> None:
        """
        :keyword url: Presigned download URL. Required.
        :paramtype url: str
        :keyword expires_in: Lifetime of the presigned URL, in seconds. Required.
        :paramtype expires_in: int
        """
        super().__init__(**kwargs)
        self.url = url
        self.expires_in = expires_in


class SandboxInfo(_serialization.Model):
    """Metadata describing a sandbox to be uploaded.

    All required parameters must be populated in order to send to Azure.

    :ivar checksum_algorithm: Checksum algorithm. Required. "sha256"
    :vartype checksum_algorithm: str or ~client.models.ChecksumAlgorithm
    :ivar checksum: Hex digest of the sandbox contents. Required.
    :vartype checksum: str
    :ivar size: Sandbox size in bytes. Required.
    :vartype size: int
    :ivar format: Archive format. Required. "tar.bz2"
    :vartype format: str or ~client.models.SandboxFormat
    """

    _validation = {
        "checksum_algorithm": {"required": True},
        # BUGFIX: the generated pattern was r"^[0-f]{64}$"; the ASCII range
        # '0'-'f' also matches ':;<=>?@', 'A'-'Z' and '[\]^_`', so invalid
        # "checksums" passed validation. A sha256 hex digest is exactly 64
        # lowercase hex characters.
        "checksum": {"required": True, "pattern": r"^[0-9a-f]{64}$"},
        "size": {"required": True, "minimum": 1},
        "format": {"required": True},
    }

    _attribute_map = {
        "checksum_algorithm": {"key": "checksum_algorithm", "type": "str"},
        "checksum": {"key": "checksum", "type": "str"},
        "size": {"key": "size", "type": "int"},
        "format": {"key": "format", "type": "str"},
    }

    def __init__(
        self,
        *,
        checksum_algorithm: Union[str, "_models.ChecksumAlgorithm"],
        checksum: str,
        size: int,
        format: Union[str, "_models.SandboxFormat"],
        **kwargs: Any
    ) -> None:
        """
        :keyword checksum_algorithm: Checksum algorithm. Required. "sha256"
        :paramtype checksum_algorithm: str or ~client.models.ChecksumAlgorithm
        :keyword checksum: Hex digest of the sandbox contents. Required.
        :paramtype checksum: str
        :keyword size: Sandbox size in bytes. Required.
        :paramtype size: int
        :keyword format: Archive format. Required. "tar.bz2"
        :paramtype format: str or ~client.models.SandboxFormat
        """
        super().__init__(**kwargs)
        self.checksum_algorithm = checksum_algorithm
        self.checksum = checksum
        self.size = size
        self.format = format


class SandboxUploadResponse(_serialization.Model):
    """Response to an upload-initiation request.

    All required parameters must be populated in order to send to Azure.

    :ivar pfn: Physical file name of the sandbox. Required.
    :vartype pfn: str
    :ivar url: Presigned upload URL; absent when the sandbox already exists.
    :vartype url: str
    :ivar fields: Form fields to include with the presigned upload.
    :vartype fields: dict[str, str]
    """

    _validation = {
        "pfn": {"required": True},
    }

    _attribute_map = {
        "pfn": {"key": "pfn", "type": "str"},
        "url": {"key": "url", "type": "str"},
        "fields": {"key": "fields", "type": "{str}"},
    }

    def __init__(
        self,
        *,
        pfn: str,
        url: Optional[str] = None,
        fields: Optional[Dict[str, str]] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword pfn: Physical file name of the sandbox. Required.
        :paramtype pfn: str
        :keyword url: Presigned upload URL; absent when the sandbox already exists.
        :paramtype url: str
        :keyword fields: Form fields to include with the presigned upload.
        :paramtype fields: dict[str, str]
        """
        super().__init__(**kwargs)
        self.pfn = pfn
        self.url = url
        self.fields = fields
diff --git a/src/diracx/client/operations/_operations.py b/src/diracx/client/operations/_operations.py index 72dea1c1..bb2d7807 100644 --- a/src/diracx/client/operations/_operations.py +++ b/src/diracx/client/operations/_operations.py @@ -280,6 +280,55 @@ def build_config_serve_config_request( return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) +def build_jobs_get_sandbox_file_request(*, pfn: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/sandbox" + + # Construct parameters + _params["pfn"] = _SERIALIZER.query( + "pfn", + pfn, + "str", + max_length=256, + pattern=r"^/S3/[a-z0-9\.\-]{3,63}(?:/[^/]+){3}/[a-z0-9]{3,10}:[0-9a-f]{64}\.[a-z0-9\.]+$", + ) + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest( + method="GET", url=_url, params=_params, headers=_headers, **kwargs + ) + + +def build_jobs_initiate_sandbox_upload_request( + **kwargs: Any, +) -> HttpRequest: # pylint: disable=name-too-long + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + content_type: Optional[str] = kwargs.pop( + "content_type", _headers.pop("Content-Type", None) + ) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/sandbox" + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header( + "content_type", content_type, "str" + ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) + + def build_jobs_submit_bulk_jobs_request(**kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) @@ -1324,6 +1373,198 @@ def __init__(self, *args, **kwargs): input_args.pop(0) if input_args else 
@distributed_trace
def get_sandbox_file(
    self, *, pfn: str, **kwargs: Any
) -> _models.SandboxDownloadResponse:
    """Get Sandbox File.

    Get a presigned URL to download a sandbox file.

    This route cannot use a redirect response because most clients will also
    send the authorization header when following a redirect. This is not
    desirable as it would leak the authorization token to the storage backend.
    Additionally, most storage backends return an error when they receive an
    authorization header for a presigned URL.

    :keyword pfn: Required.
    :paramtype pfn: str
    :return: SandboxDownloadResponse
    :rtype: ~client.models.SandboxDownloadResponse
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    # HTTP header names are case-insensitive; use the same case-insensitive
    # mapping as initiate_sandbox_upload instead of a plain dict.
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = kwargs.pop("params", {}) or {}

    cls: ClsType[_models.SandboxDownloadResponse] = kwargs.pop("cls", None)

    request = build_jobs_get_sandbox_file_request(
        pfn=pfn,
        headers=_headers,
        params=_params,
    )
    request.url = self._client.format_url(request.url)

    _stream = False
    pipeline_response: PipelineResponse = (
        self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
    )

    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(
            status_code=response.status_code, response=response, error_map=error_map
        )
        raise HttpResponseError(response=response)

    deserialized = self._deserialize("SandboxDownloadResponse", pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

@overload
def initiate_sandbox_upload(
    self,
    body: _models.SandboxInfo,
    *,
    content_type: str = "application/json",
    **kwargs: Any,
) -> _models.SandboxUploadResponse:
    """Initiate Sandbox Upload.

    Get the PFN for the given sandbox, initiate an upload as required.

    If the sandbox already exists in the database then the PFN is returned
    and there is no "url" field in the response.

    If the sandbox does not exist in the database then the "url" and "fields"
    should be used to upload the sandbox to the storage backend.

    :param body: Required.
    :type body: ~client.models.SandboxInfo
    :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
     Default value is "application/json".
    :paramtype content_type: str
    :return: SandboxUploadResponse
    :rtype: ~client.models.SandboxUploadResponse
    :raises ~azure.core.exceptions.HttpResponseError:
    """

@overload
def initiate_sandbox_upload(
    self, body: IO, *, content_type: str = "application/json", **kwargs: Any
) -> _models.SandboxUploadResponse:
    """Initiate Sandbox Upload.

    Get the PFN for the given sandbox, initiate an upload as required.

    If the sandbox already exists in the database then the PFN is returned
    and there is no "url" field in the response.

    If the sandbox does not exist in the database then the "url" and "fields"
    should be used to upload the sandbox to the storage backend.

    :param body: Required.
    :type body: IO
    :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
     Default value is "application/json".
    :paramtype content_type: str
    :return: SandboxUploadResponse
    :rtype: ~client.models.SandboxUploadResponse
    :raises ~azure.core.exceptions.HttpResponseError:
    """

@distributed_trace
def initiate_sandbox_upload(
    self, body: Union[_models.SandboxInfo, IO], **kwargs: Any
) -> _models.SandboxUploadResponse:
    """Initiate Sandbox Upload.

    Get the PFN for the given sandbox, initiate an upload as required.

    If the sandbox already exists in the database then the PFN is returned
    and there is no "url" field in the response.

    If the sandbox does not exist in the database then the "url" and "fields"
    should be used to upload the sandbox to the storage backend.

    :param body: Is either a SandboxInfo type or a IO type. Required.
    :type body: ~client.models.SandboxInfo or IO
    :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
     Default value is None.
    :paramtype content_type: str
    :return: SandboxUploadResponse
    :rtype: ~client.models.SandboxUploadResponse
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = kwargs.pop("params", {}) or {}

    content_type: Optional[str] = kwargs.pop(
        "content_type", _headers.pop("Content-Type", None)
    )
    cls: ClsType[_models.SandboxUploadResponse] = kwargs.pop("cls", None)

    content_type = content_type or "application/json"
    _json = None
    _content = None
    # A raw stream/bytes body is passed through untouched; a SandboxInfo
    # model is serialized to JSON.
    if isinstance(body, (IOBase, bytes)):
        _content = body
    else:
        _json = self._serialize.body(body, "SandboxInfo")

    request = build_jobs_initiate_sandbox_upload_request(
        content_type=content_type,
        json=_json,
        content=_content,
        headers=_headers,
        params=_params,
    )
    request.url = self._client.format_url(request.url)

    _stream = False
    pipeline_response: PipelineResponse = (
        self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
    )

    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(
            status_code=response.status_code, response=response, error_map=error_map
        )
        raise HttpResponseError(response=response)

    deserialized = self._deserialize("SandboxUploadResponse", pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized