Skip to content

Commit

Permalink
Regenerate client
Browse files Browse the repository at this point in the history
  • Loading branch information
chrisburr committed Oct 3, 2023
1 parent 3e7f3a7 commit ea886a1
Show file tree
Hide file tree
Showing 5 changed files with 607 additions and 17 deletions.
194 changes: 194 additions & 0 deletions src/diracx/client/aio/operations/_operations.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,9 +37,11 @@
build_jobs_delete_bulk_jobs_request,
build_jobs_get_job_status_bulk_request,
build_jobs_get_job_status_history_bulk_request,
build_jobs_get_sandbox_file_request,
build_jobs_get_single_job_request,
build_jobs_get_single_job_status_history_request,
build_jobs_get_single_job_status_request,
build_jobs_initiate_sandbox_upload_request,
build_jobs_kill_bulk_jobs_request,
build_jobs_search_request,
build_jobs_set_job_status_bulk_request,
Expand Down Expand Up @@ -819,6 +821,198 @@ def __init__(self, *args, **kwargs) -> None:
input_args.pop(0) if input_args else kwargs.pop("deserializer")
)

@distributed_trace_async
async def get_sandbox_file(
    self, *, pfn: str, **kwargs: Any
) -> _models.SandboxDownloadResponse:
    """Get Sandbox File.

    Return a presigned URL from which the sandbox file can be downloaded.

    A redirect response is deliberately avoided here: most clients would
    also send the Authorization header when following a redirect, leaking
    the token to the storage backend, and most storage backends reject a
    presigned URL that arrives with an authorization header anyway.

    :keyword pfn: Required.
    :paramtype pfn: str
    :return: SandboxDownloadResponse
    :rtype: ~client.models.SandboxDownloadResponse
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Map well-known HTTP status codes onto azure-core exception types;
    # callers may extend/override the mapping via the error_map kwarg.
    status_to_error = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    status_to_error.update(kwargs.pop("error_map", {}) or {})

    header_params = kwargs.pop("headers", {}) or {}
    query_params = kwargs.pop("params", {}) or {}

    cls: ClsType[_models.SandboxDownloadResponse] = kwargs.pop("cls", None)

    request = build_jobs_get_sandbox_file_request(
        pfn=pfn,
        headers=header_params,
        params=query_params,
    )
    request.url = self._client.format_url(request.url)

    # Body is small JSON, so no streaming is needed.
    pipeline_response: PipelineResponse = (
        await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=False, **kwargs
        )
    )

    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(
            status_code=response.status_code,
            response=response,
            error_map=status_to_error,
        )
        raise HttpResponseError(response=response)

    deserialized = self._deserialize("SandboxDownloadResponse", pipeline_response)

    # The optional cls hook lets callers post-process the raw response.
    return cls(pipeline_response, deserialized, {}) if cls else deserialized

@overload
async def initiate_sandbox_upload(
    self,
    body: _models.SandboxInfo,
    *,
    content_type: str = "application/json",
    **kwargs: Any
) -> _models.SandboxUploadResponse:
    """Initiate Sandbox Upload.

    Get the PFN for the given sandbox, initiating an upload as required.

    If the sandbox already exists in the database then the PFN is returned
    and there is no "url" field in the response.

    If the sandbox does not exist in the database then the "url" and "fields"
    in the response should be used to upload the sandbox to the storage backend.

    :param body: Required.
    :type body: ~client.models.SandboxInfo
    :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
     Default value is "application/json".
    :paramtype content_type: str
    :return: SandboxUploadResponse
    :rtype: ~client.models.SandboxUploadResponse
    :raises ~azure.core.exceptions.HttpResponseError:
    """

@overload
async def initiate_sandbox_upload(
    self, body: IO, *, content_type: str = "application/json", **kwargs: Any
) -> _models.SandboxUploadResponse:
    """Initiate Sandbox Upload.

    Get the PFN for the given sandbox, initiating an upload as required.

    If the sandbox already exists in the database then the PFN is returned
    and there is no "url" field in the response.

    If the sandbox does not exist in the database then the "url" and "fields"
    in the response should be used to upload the sandbox to the storage backend.

    :param body: Required.
    :type body: IO
    :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
     Default value is "application/json".
    :paramtype content_type: str
    :return: SandboxUploadResponse
    :rtype: ~client.models.SandboxUploadResponse
    :raises ~azure.core.exceptions.HttpResponseError:
    """

@distributed_trace_async
async def initiate_sandbox_upload(
    self, body: Union[_models.SandboxInfo, IO], **kwargs: Any
) -> _models.SandboxUploadResponse:
    """Initiate Sandbox Upload.

    Get the PFN for the given sandbox, initiating an upload as required.

    If the sandbox already exists in the database then the PFN is returned
    and there is no "url" field in the response. Otherwise the returned
    "url" and "fields" should be used to upload the sandbox to the storage
    backend.

    :param body: Is either a SandboxInfo type or a IO type. Required.
    :type body: ~client.models.SandboxInfo or IO
    :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
     Default value is None.
    :paramtype content_type: str
    :return: SandboxUploadResponse
    :rtype: ~client.models.SandboxUploadResponse
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Map well-known HTTP status codes onto azure-core exception types;
    # callers may extend/override the mapping via the error_map kwarg.
    status_to_error = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    status_to_error.update(kwargs.pop("error_map", {}) or {})

    header_params = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    query_params = kwargs.pop("params", {}) or {}

    # Explicit content_type kwarg wins over a Content-Type header.
    content_type: Optional[str] = kwargs.pop(
        "content_type", header_params.pop("Content-Type", None)
    )
    cls: ClsType[_models.SandboxUploadResponse] = kwargs.pop("cls", None)

    # Binary payloads are passed through as-is; model instances are
    # serialized to JSON.
    json_body = None
    raw_content = None
    if isinstance(body, (IOBase, bytes)):
        raw_content = body
    else:
        json_body = self._serialize.body(body, "SandboxInfo")

    request = build_jobs_initiate_sandbox_upload_request(
        content_type=content_type or "application/json",
        json=json_body,
        content=raw_content,
        headers=header_params,
        params=query_params,
    )
    request.url = self._client.format_url(request.url)

    # Body is small JSON, so no streaming is needed.
    pipeline_response: PipelineResponse = (
        await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=False, **kwargs
        )
    )

    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(
            status_code=response.status_code,
            response=response,
            error_map=status_to_error,
        )
        raise HttpResponseError(response=response)

    deserialized = self._deserialize("SandboxUploadResponse", pipeline_response)

    # The optional cls hook lets callers post-process the raw response.
    return cls(pipeline_response, deserialized, {}) if cls else deserialized

@overload
async def submit_bulk_jobs(
self, body: List[str], *, content_type: str = "application/json", **kwargs: Any
Expand Down
18 changes: 14 additions & 4 deletions src/diracx/client/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,9 @@
from ._models import JobSummaryParams
from ._models import JobSummaryParamsSearchItem
from ._models import LimitedJobStatusReturn
from ._models import SandboxDownloadResponse
from ._models import SandboxInfo
from ._models import SandboxUploadResponse
from ._models import ScalarSearchSpec
from ._models import SetJobStatusReturn
from ._models import SortSpec
Expand All @@ -26,14 +29,16 @@
from ._models import ValidationErrorLocItem
from ._models import VectorSearchSpec

from ._enums import ChecksumAlgorithm
from ._enums import Enum0
from ._enums import Enum1
from ._enums import Enum10
from ._enums import Enum11
from ._enums import Enum2
from ._enums import Enum3
from ._enums import Enum4
from ._enums import Enum8
from ._enums import Enum9
from ._enums import JobStatus
from ._enums import SandboxFormat
from ._enums import ScalarSearchOperator
from ._enums import VectorSearchOperator
from ._patch import __all__ as _patch_all
Expand All @@ -53,6 +58,9 @@
"JobSummaryParams",
"JobSummaryParamsSearchItem",
"LimitedJobStatusReturn",
"SandboxDownloadResponse",
"SandboxInfo",
"SandboxUploadResponse",
"ScalarSearchSpec",
"SetJobStatusReturn",
"SortSpec",
Expand All @@ -62,14 +70,16 @@
"ValidationError",
"ValidationErrorLocItem",
"VectorSearchSpec",
"ChecksumAlgorithm",
"Enum0",
"Enum1",
"Enum10",
"Enum11",
"Enum2",
"Enum3",
"Enum4",
"Enum8",
"Enum9",
"JobStatus",
"SandboxFormat",
"ScalarSearchOperator",
"VectorSearchOperator",
]
Expand Down
36 changes: 24 additions & 12 deletions src/diracx/client/models/_enums.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,12 @@
from azure.core import CaseInsensitiveEnumMeta


class ChecksumAlgorithm(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Checksum algorithm identifiers; value matching is case-insensitive."""

    # SHA-256 is the only algorithm currently defined.
    SHA256 = "sha256"


class Enum0(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""Enum0."""

Expand All @@ -22,6 +28,18 @@ class Enum1(str, Enum, metaclass=CaseInsensitiveEnumMeta):
)


class Enum10(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Enum10.

    Auto-generated single-value enum holding "asc" — presumably the
    ascending sort direction for search requests; confirm against the API spec.
    """

    ASC = "asc"


class Enum11(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Enum11.

    Auto-generated single-value enum holding "dsc" — presumably the
    descending sort direction for search requests; confirm against the API spec.
    """

    DSC = "dsc"


class Enum2(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""Enum2."""

Expand All @@ -40,18 +58,6 @@ class Enum4(str, Enum, metaclass=CaseInsensitiveEnumMeta):
S256 = "S256"


class Enum8(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Enum8.

    Auto-generated single-value enum holding "asc" — presumably an
    ascending sort direction; confirm against the API spec.
    """

    ASC = "asc"


class Enum9(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Enum9.

    Auto-generated single-value enum holding "dsc" — presumably a
    descending sort direction; confirm against the API spec.
    """

    DSC = "dsc"


class JobStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""An enumeration."""

Expand All @@ -72,6 +78,12 @@ class JobStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
RESCHEDULED = "Rescheduled"


class SandboxFormat(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Sandbox archive format identifiers; value matching is case-insensitive."""

    # bzip2-compressed tarball is the only format currently defined.
    TAR_BZ2 = "tar.bz2"


class ScalarSearchOperator(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""An enumeration."""

Expand Down
Loading

0 comments on commit ea886a1

Please sign in to comment.