diff --git a/sdk/datafactory/azure-mgmt-datafactory/_meta.json b/sdk/datafactory/azure-mgmt-datafactory/_meta.json index f132eab3f42f8..dd39896e10e6f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/_meta.json +++ b/sdk/datafactory/azure-mgmt-datafactory/_meta.json @@ -1,11 +1,11 @@ { - "commit": "92a02c9755fa3b2359df8dfc031babcddf6d91ba", + "commit": "82fb7e51cd1efb0b5f1ed80a24f8cd08f91bb5e6", "repository_url": "https://github.com/Azure/azure-rest-api-specs", "autorest": "3.10.2", "use": [ - "@autorest/python@6.13.16", + "@autorest/python@6.15.0", "@autorest/modelerfour@4.27.0" ], - "autorest_command": "autorest specification/datafactory/resource-manager/readme.md --generate-sample=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/home/vsts/work/1/azure-sdk-for-python/sdk --use=@autorest/python@6.13.16 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False", + "autorest_command": "autorest specification/datafactory/resource-manager/readme.md --generate-sample=True --generate-test=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/mnt/vss/_work/1/s/azure-sdk-for-python/sdk --use=@autorest/python@6.15.0 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False", "readme": "specification/datafactory/resource-manager/readme.md" } \ No newline at end of file diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py index 9d12f544c4192..127c83a86a1fb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py @@ -8,6 +8,7 @@ from copy import deepcopy from typing import Any, TYPE_CHECKING +from typing_extensions import Self from azure.core.pipeline import policies from azure.core.rest import HttpRequest, HttpResponse @@ -230,7 +231,7 @@ def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: def close(self) -> None: self._client.close() - def __enter__(self) -> "DataFactoryManagementClient": + def __enter__(self) -> Self: self._client.__enter__() return self diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_serialization.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_serialization.py index 2f781d740827a..8139854b97bb8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_serialization.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_serialization.py @@ -144,6 +144,8 @@ def _json_attemp(data): # context otherwise. 
_LOGGER.critical("Wasn't XML not JSON, failing") raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) @classmethod @@ -1441,7 +1443,7 @@ def _deserialize(self, target_obj, data): elif isinstance(response, type) and issubclass(response, Enum): return self.deserialize_enum(data, response) - if data is None: + if data is None or data is CoreNull: return data try: attributes = response._attribute_map # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_vendor.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_vendor.py deleted file mode 100644 index 0dafe0e287ff1..0000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_vendor.py +++ /dev/null @@ -1,16 +0,0 @@ -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.core.pipeline.transport import HttpRequest - - -def _convert_request(request, files=None): - data = request.content if not files else None - request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) - if files: - request.set_formdata_body(files) - return request diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py index 142a0420b39b4..c47f66669f1bf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
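
The hunks above capture three cross-cutting generator changes before the per-operation files begin: `__enter__` now returns `typing_extensions.Self` so subclassed clients type correctly, the shared deserializer gains a `text/*` passthrough (and treats `CoreNull` like `None`), and the `_vendor._convert_request` shim is deleted because requests are now built directly as `azure.core.rest.HttpRequest`. A minimal standalone sketch of the new `text/` fallback — the function name and dispatch here are illustrative, not the SDK's private `_serialization` internals:

```python
# Illustrative sketch of the deserializer's new text/* branch; not the
# SDK's actual _serialization API, just the same dispatch idea.
import json
from typing import Any


def deserialize_from_text(data_as_str: str, content_type: str) -> Any:
    """Dispatch a response body by its declared content type."""
    if content_type.startswith("application/json"):
        return json.loads(data_as_str)
    if content_type.startswith("text/"):
        # New branch from the hunk above: return text bodies verbatim
        # instead of raising a deserialization error.
        return data_as_str
    raise ValueError("Cannot deserialize content-type: {}".format(content_type))


assert deserialize_from_text('{"ok": true}', "application/json") == {"ok": True}
assert deserialize_from_text("plain body", "text/plain") == "plain body"
```
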
# -------------------------------------------------------------------------- -VERSION = "8.0.0" +VERSION = "1.0.0" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py index ec9b4794fbad1..597d4988e2b8a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py @@ -8,6 +8,7 @@ from copy import deepcopy from typing import Any, Awaitable, TYPE_CHECKING +from typing_extensions import Self from azure.core.pipeline import policies from azure.core.rest import AsyncHttpResponse, HttpRequest @@ -233,7 +234,7 @@ def _send_request( async def close(self) -> None: await self._client.close() - async def __aenter__(self) -> "DataFactoryManagementClient": + async def __aenter__(self) -> Self: await self._client.__aenter__() return self diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_activity_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_activity_runs_operations.py index a45e6ed5bca21..ccfa35e15c734 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_activity_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_activity_runs_operations.py @@ -19,14 +19,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._activity_runs_operations import build_query_by_pipeline_run_request if sys.version_info >= (3, 9): @@ -173,7 +171,6 @@ async def query_by_pipeline_run( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -187,7 +184,7 @@ async def query_by_pipeline_run( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ActivityRunsQueryResponse", pipeline_response) + deserialized = self._deserialize("ActivityRunsQueryResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_change_data_capture_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_change_data_capture_operations.py index bf52dd3a28e1e..8b8fabbfbe20d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_change_data_capture_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_change_data_capture_operations.py @@ -21,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._change_data_capture_operations import ( build_create_or_update_request, build_delete_request, @@ -108,7 +106,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -124,7 +121,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -287,7 +283,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -301,7 +296,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ChangeDataCaptureResource", pipeline_response) + deserialized = self._deserialize("ChangeDataCaptureResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -357,7 +352,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -371,7 +365,7 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ChangeDataCaptureResource", pipeline_response) + deserialized = self._deserialize("ChangeDataCaptureResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -417,7 +411,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -473,7 +466,6 @@ async def start( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -529,7 +521,6 @@ async def stop( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -585,7 +576,6 @@ async def status( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -599,7 +589,7 @@ async def status( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("str", pipeline_response) + deserialized = self._deserialize("str", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_credential_operations_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_credential_operations_operations.py index 345cf3b20aa22..894ca06edda9c 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_credential_operations_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_credential_operations_operations.py @@ -21,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._credential_operations_operations import ( build_create_or_update_request, build_delete_request, @@ -104,7 +102,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -120,7 +117,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -279,7 +275,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -293,7 +288,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("CredentialResource", pipeline_response) + deserialized = self._deserialize("CredentialResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -349,7 +344,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -365,7 +359,7 @@ async def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("CredentialResource", pipeline_response) + deserialized = self._deserialize("CredentialResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -411,7 +405,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flow_debug_session_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flow_debug_session_operations.py index f3e4c2ccbc795..d4af31c851a02 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flow_debug_session_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flow_debug_session_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing 
import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -21,9 +21,8 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -31,7 +30,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._data_flow_debug_session_operations import ( build_add_data_flow_request, build_create_request, @@ -73,7 +71,7 @@ async def _create_initial( factory_name: str, request: Union[_models.CreateDataFlowDebugSessionRequest, IO[bytes]], **kwargs: Any - ) -> Optional[_models.CreateDataFlowDebugSessionResponse]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -87,7 +85,7 @@ async def _create_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.CreateDataFlowDebugSessionResponse]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -108,10 +106,9 @@ async def _create_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -119,17 +116,19 @@ async def _create_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + await response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize("CreateDataFlowDebugSessionResponse", pipeline_response) + deserialized = response.stream_download(self._client._pipeline) if response.status_code == 202: response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -235,10 +234,11 @@ async def begin_create( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("CreateDataFlowDebugSessionResponse", pipeline_response) + deserialized = self._deserialize("CreateDataFlowDebugSessionResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -301,7 +301,6 @@ def 
prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -317,7 +316,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -453,7 +451,6 @@ async def add_data_flow( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -467,7 +464,7 @@ async def add_data_flow( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("AddDataFlowToDebugSessionResponse", pipeline_response) + deserialized = self._deserialize("AddDataFlowToDebugSessionResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -581,7 +578,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -604,7 +600,7 @@ async def _execute_command_initial( factory_name: str, request: Union[_models.DataFlowDebugCommandRequest, IO[bytes]], **kwargs: Any - ) -> Optional[_models.DataFlowDebugCommandResponse]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -618,7 +614,7 @@ async def _execute_command_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.DataFlowDebugCommandResponse]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -639,10 +635,9 @@ async def _execute_command_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -650,17 +645,19 @@ async def _execute_command_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + await response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize("DataFlowDebugCommandResponse", pipeline_response) + deserialized = response.stream_download(self._client._pipeline) if response.status_code == 202: response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -766,10 +763,11 @@ async def begin_execute_command( params=_params, **kwargs ) + await raw_result.http_response.read() # type: 
ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("DataFlowDebugCommandResponse", pipeline_response) + deserialized = self._deserialize("DataFlowDebugCommandResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flows_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flows_operations.py index d8c02e74412d0..e884f876bb440 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flows_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flows_operations.py @@ -21,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._data_flows_operations import ( build_create_or_update_request, build_delete_request, @@ -194,7 +192,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -208,7 +205,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DataFlowResource", pipeline_response) + deserialized = self._deserialize("DataFlowResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -264,7 +261,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -278,7 +274,7 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DataFlowResource", pipeline_response) + deserialized = self._deserialize("DataFlowResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -324,7 +320,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -381,7 +376,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -397,7 +391,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_datasets_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_datasets_operations.py index 2bd7d817a9e70..9877c25864523 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_datasets_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_datasets_operations.py @@ -21,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._datasets_operations import ( build_create_or_update_request, build_delete_request, @@ -103,7 +101,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -119,7 +116,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -278,7 +274,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -292,7 +287,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatasetResource", pipeline_response) + deserialized = self._deserialize("DatasetResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -348,7 +343,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -364,7 +358,7 @@ async def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("DatasetResource", pipeline_response) + deserialized = self._deserialize("DatasetResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -410,7 +404,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_exposure_control_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_exposure_control_operations.py index fc55ec9c00354..9378b4cbc2db0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_exposure_control_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_exposure_control_operations.py @@ -19,14 +19,12 @@ map_error, ) from azure.core.pipeline import 
PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._exposure_control_operations import ( build_get_feature_value_by_factory_request, build_get_feature_value_request, @@ -158,7 +156,6 @@ async def get_feature_value( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -172,7 +169,7 @@ async def get_feature_value( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ExposureControlResponse", pipeline_response) + deserialized = self._deserialize("ExposureControlResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -287,7 +284,6 @@ async def get_feature_value_by_factory( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -301,7 +297,7 @@ async def get_feature_value_by_factory( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ExposureControlResponse", pipeline_response) + deserialized = self._deserialize("ExposureControlResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -419,7 +415,6 @@ async def query_feature_values_by_factory( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -433,7 +428,7 @@ async def query_feature_values_by_factory( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ExposureControlBatchResponse", pipeline_response) + deserialized = self._deserialize("ExposureControlBatchResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py index 141ac5bfd894e..909d7e58b3fce 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py @@ -21,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._factories_operations import ( build_configure_factory_repo_request, build_create_or_update_request, @@ -100,7 +98,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -116,7 +113,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -234,7 +230,6 @@ async def configure_factory_repo( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -248,7 +243,7 @@ async def configure_factory_repo( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("Factory", pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -289,7 +284,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -305,7 +299,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -454,7 +447,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -468,7 +460,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("Factory", pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -583,7 +575,6 @@ async def update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -597,7 +588,7 @@ async def update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("Factory", pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -645,7 +636,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -661,7 +651,7 @@ async def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("Factory", pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -704,7 +694,6 @@ async def delete( # pylint: 
disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -829,7 +818,6 @@ async def get_git_hub_access_token( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -843,7 +831,7 @@ async def get_git_hub_access_token( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("GitHubAccessTokenResponse", pipeline_response) + deserialized = self._deserialize("GitHubAccessTokenResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -957,7 +945,6 @@ async def get_data_plane_access( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -971,7 +958,7 @@ async def get_data_plane_access( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("AccessPolicyResponse", pipeline_response) + deserialized = self._deserialize("AccessPolicyResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_global_parameters_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_global_parameters_operations.py index 3ba8eb0e8fc03..e65591c4497e3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_global_parameters_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_global_parameters_operations.py @@ -21,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._global_parameters_operations import ( build_create_or_update_request, build_delete_request, @@ -105,7 +103,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -121,7 +118,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -189,7 +185,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -203,7 +198,7 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("GlobalParameterResource", pipeline_response) + deserialized = self._deserialize("GlobalParameterResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -327,7 +322,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -341,7 +335,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("GlobalParameterResource", pipeline_response) + deserialized = self._deserialize("GlobalParameterResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -387,7 +381,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_nodes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_nodes_operations.py index ad6d4e6ce278d..491a0c2790075 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_nodes_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_nodes_operations.py @@ -19,14 +19,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._integration_runtime_nodes_operations import ( build_delete_request, build_get_ip_address_request, @@ -103,7 +101,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -117,7 +114,7 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SelfHostedIntegrationRuntimeNode", pipeline_response) + deserialized = self._deserialize("SelfHostedIntegrationRuntimeNode", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -166,7 +163,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -315,7 +311,6 @@ async def update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -329,7 +324,7 @@ async def update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SelfHostedIntegrationRuntimeNode", pipeline_response) + deserialized = self._deserialize("SelfHostedIntegrationRuntimeNode", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -378,7 +373,6 @@ async def get_ip_address( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -392,7 +386,7 @@ async def get_ip_address( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeNodeIpAddress", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeNodeIpAddress", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py index 398280e5e34e6..ff0f5177cb966 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -19,16 +19,14 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from 
azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._integration_runtime_object_metadata_operations import build_get_request, build_refresh_request if sys.version_info >= (3, 9): @@ -60,7 +58,7 @@ def __init__(self, *args, **kwargs) -> None: async def _refresh_initial( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any - ) -> Optional[_models.SsisObjectMetadataStatusResponse]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -73,7 +71,7 @@ async def _refresh_initial( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.SsisObjectMetadataStatusResponse]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_refresh_request( resource_group_name=resource_group_name, @@ -84,10 +82,9 @@ async def _refresh_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -95,12 +92,15 @@ async def _refresh_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + await response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None if response.status_code == 200: - deserialized = self._deserialize("SsisObjectMetadataStatusResponse", pipeline_response) + deserialized = response.stream_download(self._client._pipeline) + + if response.status_code == 202: + deserialized = response.stream_download(self._client._pipeline) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -144,10 +144,11 @@ async def begin_refresh( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SsisObjectMetadataStatusResponse", pipeline_response) + deserialized = self._deserialize("SsisObjectMetadataStatusResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -295,7 +296,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -309,7 +309,7 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SsisObjectMetadataListResponse", pipeline_response) + deserialized = self._deserialize("SsisObjectMetadataListResponse", pipeline_response.http_response) if 
cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtimes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtimes_operations.py index ed69ede06e6f3..d791a1dd172f0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtimes_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtimes_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -21,9 +21,8 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -31,7 +30,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._integration_runtimes_operations import ( build_create_linked_integration_runtime_request, build_create_or_update_request, @@ -120,7 +118,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -136,7 +133,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -299,7 +295,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -313,7 +308,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -369,7 +364,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -385,7 +379,7 @@ async def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -513,7 +507,6 @@ async def update( 
headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -527,7 +520,7 @@ async def update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -573,7 +566,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -629,7 +621,6 @@ async def get_status( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -643,7 +634,7 @@ async def get_status( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -691,7 +682,6 @@ async def list_outbound_network_dependencies_endpoints( # pylint: disable=name- headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -706,7 +696,7 @@ async def list_outbound_network_dependencies_endpoints( # pylint: disable=name- raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize( - "IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse", pipeline_response + "IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse", pipeline_response.http_response ) if cls: @@ -754,7 +744,6 @@ async def get_connection_info( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -768,7 +757,7 @@ async def get_connection_info( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeConnectionInfo", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeConnectionInfo", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -897,7 +886,6 @@ async def regenerate_auth_key( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -911,7 +899,7 @@ async def regenerate_auth_key( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeAuthKeys", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeAuthKeys", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -957,7 +945,6 @@ async def list_auth_keys( headers=_headers, params=_params, ) - _request = 
_convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -971,7 +958,7 @@ async def list_auth_keys( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeAuthKeys", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeAuthKeys", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -980,7 +967,7 @@ async def list_auth_keys( async def _start_initial( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any - ) -> Optional[_models.IntegrationRuntimeStatusResponse]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -993,7 +980,7 @@ async def _start_initial( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.IntegrationRuntimeStatusResponse]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_start_request( resource_group_name=resource_group_name, @@ -1004,10 +991,9 @@ async def _start_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1015,12 +1001,15 @@ async def _start_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + await response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None if response.status_code == 200: - deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) + deserialized = response.stream_download(self._client._pipeline) + + if response.status_code == 202: + deserialized = response.stream_download(self._client._pipeline) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1064,10 +1053,11 @@ async def begin_start( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1089,9 +1079,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _stop_initial( # pylint: disable=inconsistent-return-statements + async def _stop_initial( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1104,7 +1094,7 @@ async def _stop_initial( # pylint: 
disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_stop_request( resource_group_name=resource_group_name, @@ -1115,10 +1105,9 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1126,11 +1115,20 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200, 202]: + await response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + if response.status_code == 200: + deserialized = response.stream_download(self._client._pipeline) + + if response.status_code == 202: + deserialized = response.stream_download(self._client._pipeline) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_stop( @@ -1157,7 +1155,7 @@ async def begin_stop( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._stop_initial( # type: ignore + raw_result = await self._stop_initial( resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, @@ -1167,6 +1165,7 @@ async def begin_stop( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1230,7 +1229,6 @@ async def sync_credentials( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1287,7 +1285,6 @@ async def get_monitoring_data( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1301,7 +1298,7 @@ async def get_monitoring_data( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeMonitoringData", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeMonitoringData", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1347,7 +1344,6 @@ async def upgrade( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1488,7 +1484,6 @@ async def remove_links( # pylint: disable=inconsistent-return-statements 
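
Taken together, the integration runtime hunks above rework the async long-running-operation plumbing (streamed initial calls, error bodies read before raising, deserialization from pipeline_response.http_response) without changing the public poller surface. A minimal consumption sketch, assuming azure-identity for credentials; the subscription id and resource names are placeholders:

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.datafactory.aio import DataFactoryManagementClient


async def main() -> None:
    # Credential and client are async context managers; both close cleanly on exit.
    async with DefaultAzureCredential() as credential:
        async with DataFactoryManagementClient(credential, "<subscription-id>") as client:
            # begin_start returns an AsyncLROPoller; result() resolves to the
            # deserialized IntegrationRuntimeStatusResponse once polling completes.
            poller = await client.integration_runtimes.begin_start(
                resource_group_name="my-rg",
                factory_name="my-factory",
                integration_runtime_name="my-ir",
            )
            status = await poller.result()
            print(status.name)


asyncio.run(main())

begin_stop follows the same shape; its poller carries no body and resolves to None.
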
headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1628,7 +1623,6 @@ async def create_linked_integration_runtime( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1642,7 +1636,7 @@ async def create_linked_integration_runtime( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_linked_services_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_linked_services_operations.py index a31e4b4db3680..6f20219541a7a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_linked_services_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_linked_services_operations.py @@ -21,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._linked_services_operations import ( build_create_or_update_request, build_delete_request, @@ -105,7 +103,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -121,7 +118,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -283,7 +279,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -297,7 +292,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("LinkedServiceResource", pipeline_response) + deserialized = self._deserialize("LinkedServiceResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -353,7 +348,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -369,7 +363,7 @@ async def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("LinkedServiceResource", pipeline_response) + deserialized = self._deserialize("LinkedServiceResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -415,7 +409,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_private_endpoints_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_private_endpoints_operations.py index f09e7b1641e56..da3e687c4de14 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_private_endpoints_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_private_endpoints_operations.py @@ -21,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._managed_private_endpoints_operations import ( build_create_or_update_request, build_delete_request, @@ -108,7 +106,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -124,7 +121,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -297,7 +293,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -311,7 +306,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ManagedPrivateEndpointResource", pipeline_response) + deserialized = self._deserialize("ManagedPrivateEndpointResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -371,7 +366,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -385,7 +379,7 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ManagedPrivateEndpointResource", pipeline_response) + deserialized = self._deserialize("ManagedPrivateEndpointResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -439,7 +433,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_virtual_networks_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_virtual_networks_operations.py index 4a4f128d4bae5..d393270392229 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_virtual_networks_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_virtual_networks_operations.py @@ -21,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._managed_virtual_networks_operations import ( build_create_or_update_request, build_get_request, @@ -104,7 +102,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -120,7 +117,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -283,7 +279,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -297,7 +292,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ManagedVirtualNetworkResource", pipeline_response) + deserialized = self._deserialize("ManagedVirtualNetworkResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -353,7 +348,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -367,7 +361,7 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ManagedVirtualNetworkResource", pipeline_response) + deserialized = self._deserialize("ManagedVirtualNetworkResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_operations.py index a848d37430caa..d5b7a449d0840 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._operations import build_list_request if sys.version_info >= (3, 9): @@ -87,7 +85,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -103,7 +100,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipeline_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipeline_runs_operations.py index c6ae1b858107a..136dd4e28d062 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipeline_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipeline_runs_operations.py @@ -19,14 +19,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._pipeline_runs_operations import ( build_cancel_request, build_get_request, @@ -167,7 +165,6 @@ async def query_by_factory( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -181,7 +178,7 @@ async def query_by_factory( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PipelineRunsQueryResponse", pipeline_response) + deserialized = self._deserialize("PipelineRunsQueryResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -225,7 +222,6 @@ async def get(self, resource_group_name: str, factory_name: str, run_id: str, ** headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -239,7 +235,7 @@ async def get(self, resource_group_name: str, factory_name: str, run_id: str, ** map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PipelineRun", pipeline_response) + deserialized = self._deserialize("PipelineRun", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -294,7 +290,6 @@ async def cancel( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipelines_operations.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipelines_operations.py index d5e054e681598..1d7c423bdfff3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipelines_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipelines_operations.py @@ -21,15 +21,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._pipelines_operations import ( build_create_or_update_request, build_create_run_request, @@ -106,7 +104,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -122,7 +119,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -281,7 +277,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -295,7 +290,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PipelineResource", pipeline_response) + deserialized = self._deserialize("PipelineResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -351,7 +346,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -367,7 +361,7 @@ async def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("PipelineResource", pipeline_response) + deserialized = self._deserialize("PipelineResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -413,7 +407,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -611,7 +604,6 @@ async def create_run( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -625,7 +617,7 @@ async def create_run( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("CreateRunResponse", pipeline_response) + deserialized = self._deserialize("CreateRunResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_end_point_connections_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_end_point_connections_operations.py index aaa8d9cb39c24..4448a257ebc33 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_end_point_connections_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_end_point_connections_operations.py @@ -20,14 +20,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._private_end_point_connections_operations import build_list_by_factory_request if sys.version_info >= (3, 9): @@ -98,7 +96,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -114,7 +111,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_endpoint_connection_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_endpoint_connection_operations.py index d444ea0bf9d6a..0708068b34b38 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_endpoint_connection_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_endpoint_connection_operations.py @@ -19,14 +19,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._private_endpoint_connection_operations import ( build_create_or_update_request, build_delete_request, @@ -195,7 +193,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -209,7 +206,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PrivateEndpointConnectionResource", pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnectionResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -265,7 +262,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -279,7 +275,7 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PrivateEndpointConnectionResource", pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnectionResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -325,7 +321,6 @@ async def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_link_resources_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_link_resources_operations.py index deefa40fd5b35..0a374306c498c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_link_resources_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_link_resources_operations.py @@ -18,14 +18,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models -from ..._vendor import _convert_request from ...operations._private_link_resources_operations import build_get_request if sys.version_info >= (3, 9): @@ -91,7 +89,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -105,7 +102,7 @@ async def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PrivateLinkResourcesWrapper", pipeline_response) + deserialized = self._deserialize("PrivateLinkResourcesWrapper", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_trigger_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_trigger_runs_operations.py index bf3baef1b991d..83963ccb52121 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_trigger_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_trigger_runs_operations.py @@ -19,14 +19,12 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models -from ..._vendor import _convert_request from ...operations._trigger_runs_operations import ( build_cancel_request, build_query_by_factory_request, @@ -102,7 +100,6 @@ async def rerun( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -161,7 +158,6 @@ async def cancel( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -285,7 +281,6 @@ async def query_by_factory( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -299,7 +294,7 @@ async def query_by_factory( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("TriggerRunsQueryResponse", pipeline_response) + deserialized = self._deserialize("TriggerRunsQueryResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_triggers_operations.py index 98cc206b7c9df..b36e5efeac301 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_triggers_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_triggers_operations.py @@ -8,7 +8,7 @@ # 
-------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList @@ -21,9 +21,8 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict @@ -31,7 +30,6 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models -from ..._vendor import _convert_request from ...operations._triggers_operations import ( build_create_or_update_request, build_delete_request, @@ -111,7 +109,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -127,7 +124,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -263,7 +259,6 @@ async def query_by_factory( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -277,7 +272,7 @@ async def query_by_factory( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("TriggerQueryResponse", pipeline_response) + deserialized = self._deserialize("TriggerQueryResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -414,7 +409,6 @@ async def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -428,7 +422,7 @@ async def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("TriggerResource", pipeline_response) + deserialized = self._deserialize("TriggerResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -484,7 +478,6 @@ async def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -500,7 +493,7 @@ async def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("TriggerResource", pipeline_response) + deserialized = self._deserialize("TriggerResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -546,7 +539,6 @@ async def delete( # pylint: disable=inconsistent-return-statements 
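
Alongside the LRO changes, the pager plumbing in these operations files drops the _convert_request hop, while the public async iteration surface stays the same. A short sketch of driving the trigger pager shown in the hunks above (names are placeholders; azure-identity is assumed):

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.datafactory.aio import DataFactoryManagementClient


async def list_triggers() -> None:
    async with DefaultAzureCredential() as credential:
        async with DataFactoryManagementClient(credential, "<subscription-id>") as client:
            # list_by_factory returns an AsyncItemPaged; async iteration drives
            # the prepare_request/next-link machinery shown in the hunks above.
            async for trigger in client.triggers.list_by_factory("my-rg", "my-factory"):
                print(trigger.name)


asyncio.run(list_triggers())
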
headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -565,7 +557,7 @@ async def delete( # pylint: disable=inconsistent-return-statements async def _subscribe_to_events_initial( self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any - ) -> Optional[_models.TriggerSubscriptionOperationStatus]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -578,7 +570,7 @@ async def _subscribe_to_events_initial( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.TriggerSubscriptionOperationStatus]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_subscribe_to_events_request( resource_group_name=resource_group_name, @@ -589,10 +581,9 @@ async def _subscribe_to_events_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -600,12 +591,15 @@ async def _subscribe_to_events_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + await response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None if response.status_code == 200: - deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) + deserialized = response.stream_download(self._client._pipeline) + + if response.status_code == 202: + deserialized = response.stream_download(self._client._pipeline) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -649,10 +643,11 @@ async def begin_subscribe_to_events( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) + deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -713,7 +708,6 @@ async def get_event_subscription_status( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -727,7 +721,7 @@ async def get_event_subscription_status( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) + deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -736,7 +730,7 @@ async def get_event_subscription_status( async def _unsubscribe_from_events_initial( self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any - 
) -> Optional[_models.TriggerSubscriptionOperationStatus]: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -749,7 +743,7 @@ async def _unsubscribe_from_events_initial( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.TriggerSubscriptionOperationStatus]] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_unsubscribe_from_events_request( resource_group_name=resource_group_name, @@ -760,10 +754,9 @@ async def _unsubscribe_from_events_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -771,12 +764,15 @@ async def _unsubscribe_from_events_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + await response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None if response.status_code == 200: - deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) + deserialized = response.stream_download(self._client._pipeline) + + if response.status_code == 202: + deserialized = response.stream_download(self._client._pipeline) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -820,10 +816,11 @@ async def begin_unsubscribe_from_events( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) + deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -845,9 +842,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - async def _start_initial( # pylint: disable=inconsistent-return-statements + async def _start_initial( self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -860,7 +857,7 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_start_request( resource_group_name=resource_group_name, @@ -871,10 +868,9 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _stream = True pipeline_response: 
PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -882,11 +878,16 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200]: + await response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def begin_start( @@ -913,7 +914,7 @@ async def begin_start( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._start_initial( # type: ignore + raw_result = await self._start_initial( resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, @@ -923,6 +924,7 @@ async def begin_start( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -944,9 +946,9 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- ) return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - async def _stop_initial( # pylint: disable=inconsistent-return-statements + async def _stop_initial( self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any - ) -> None: + ) -> AsyncIterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -959,7 +961,7 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) _request = build_stop_request( resource_group_name=resource_group_name, @@ -970,10 +972,9 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _stream = True pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -981,11 +982,16 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200]: + await response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace_async async def 
begin_stop( @@ -1012,7 +1018,7 @@ async def begin_stop( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._stop_initial( # type: ignore + raw_result = await self._stop_initial( resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, @@ -1022,6 +1028,7 @@ async def begin_stop( params=_params, **kwargs ) + await raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 53a0c4e851d9b..6b62001aae792 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -153,6 +153,7 @@ from ._models_py3 import ConcurObjectDataset from ._models_py3 import ConcurSource from ._models_py3 import ConnectionStateProperties +from ._models_py3 import ContinuationSettingsReference from ._models_py3 import ControlActivity from ._models_py3 import CopyActivity from ._models_py3 import CopyActivityLogSettings @@ -1071,6 +1072,7 @@ "ConcurObjectDataset", "ConcurSource", "ConnectionStateProperties", + "ContinuationSettingsReference", "ControlActivity", "CopyActivity", "CopyActivityLogSettings", diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py index 5f07e7eab04d2..146bb2c7e39e0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py @@ -343,10 +343,11 @@ class GlobalParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta): OBJECT = "Object" STRING = "String" - INT_ENUM = "Int" + INT = "Int" FLOAT = "Float" BOOL = "Bool" ARRAY = "Array" + INT_ENUM = "Int" class GoogleAdWordsAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -588,9 +589,10 @@ class NotebookParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Notebook parameter type.""" STRING = "string" - INT_ENUM = "int" + INT = "int" FLOAT = "float" BOOL = "bool" + INT_ENUM = "int" class NotebookReferenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -638,11 +640,12 @@ class ParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta): OBJECT = "Object" STRING = "String" - INT_ENUM = "Int" + INT = "Int" FLOAT = "Float" BOOL = "Bool" ARRAY = "Array" SECURE_STRING = "SecureString" + INT_ENUM = "Int" class PhoenixAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -728,8 +731,9 @@ class RunQueryFilterOperator(str, Enum, metaclass=CaseInsensitiveEnumMeta): EQUALS = "Equals" NOT_EQUALS = "NotEquals" - IN_ENUM = "In" + IN = "In" NOT_IN = "NotIn" + IN_ENUM = "In" class RunQueryOrder(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -964,6 +968,7 @@ class SqlServerAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): SQL = "SQL" WINDOWS = "Windows" + USER_ASSIGNED_MANAGED_IDENTITY = "UserAssignedManagedIdentity" class SqlWriteBehaviorEnum(str, Enum, 
metaclass=CaseInsensitiveEnumMeta): @@ -1002,12 +1007,13 @@ class StoredProcedureParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta) """Stored procedure parameter type.""" STRING = "String" - INT_ENUM = "Int" + INT = "Int" INT64 = "Int64" DECIMAL = "Decimal" GUID = "Guid" BOOLEAN = "Boolean" DATE = "Date" + INT_ENUM = "Int" class SybaseAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index c0b5f7a60d601..efaa5fc4f0c6f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -19400,6 +19400,45 @@ def __init__(self, **kwargs: Any) -> None: self.status = None +class ContinuationSettingsReference(_serialization.Model): + """Continuation settings for execute data flow activity. + + :ivar continuation_ttl_in_minutes: Continuation TTL in minutes. + :vartype continuation_ttl_in_minutes: JSON + :ivar idle_condition: Idle condition. + :vartype idle_condition: JSON + :ivar customized_checkpoint_key: Customized checkpoint key. + :vartype customized_checkpoint_key: JSON + """ + + _attribute_map = { + "continuation_ttl_in_minutes": {"key": "continuationTtlInMinutes", "type": "object"}, + "idle_condition": {"key": "idleCondition", "type": "object"}, + "customized_checkpoint_key": {"key": "customizedCheckpointKey", "type": "object"}, + } + + def __init__( + self, + *, + continuation_ttl_in_minutes: Optional[JSON] = None, + idle_condition: Optional[JSON] = None, + customized_checkpoint_key: Optional[JSON] = None, + **kwargs: Any + ) -> None: + """ + :keyword continuation_ttl_in_minutes: Continuation TTL in minutes. + :paramtype continuation_ttl_in_minutes: JSON + :keyword idle_condition: Idle condition. + :paramtype idle_condition: JSON + :keyword customized_checkpoint_key: Customized checkpoint key. + :paramtype customized_checkpoint_key: JSON + """ + super().__init__(**kwargs) + self.continuation_ttl_in_minutes = continuation_ttl_in_minutes + self.idle_condition = idle_condition + self.customized_checkpoint_key = customized_checkpoint_key + + class CopyActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """Copy activity. @@ -27661,6 +27700,8 @@ class ExecuteDataFlowActivity(ExecutionActivity): # pylint: disable=too-many-in :vartype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo :ivar integration_runtime: The integration runtime reference. :vartype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar continuation_settings: Continuation settings for execute data flow activity. + :vartype continuation_settings: ~azure.mgmt.datafactory.models.ContinuationSettingsReference :ivar compute: Compute properties for data flow activity. :vartype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :ivar trace_level: Trace level setting used for data flow monitoring output. 
Supported values @@ -27698,6 +27739,10 @@ class ExecuteDataFlowActivity(ExecutionActivity): # pylint: disable=too-many-in "data_flow": {"key": "typeProperties.dataFlow", "type": "DataFlowReference"}, "staging": {"key": "typeProperties.staging", "type": "DataFlowStagingInfo"}, "integration_runtime": {"key": "typeProperties.integrationRuntime", "type": "IntegrationRuntimeReference"}, + "continuation_settings": { + "key": "typeProperties.continuationSettings", + "type": "ContinuationSettingsReference", + }, "compute": {"key": "typeProperties.compute", "type": "ExecuteDataFlowActivityTypePropertiesCompute"}, "trace_level": {"key": "typeProperties.traceLevel", "type": "object"}, "continue_on_error": {"key": "typeProperties.continueOnError", "type": "object"}, @@ -27720,6 +27765,7 @@ def __init__( policy: Optional["_models.ActivityPolicy"] = None, staging: Optional["_models.DataFlowStagingInfo"] = None, integration_runtime: Optional["_models.IntegrationRuntimeReference"] = None, + continuation_settings: Optional["_models.ContinuationSettingsReference"] = None, compute: Optional["_models.ExecuteDataFlowActivityTypePropertiesCompute"] = None, trace_level: Optional[JSON] = None, continue_on_error: Optional[JSON] = None, @@ -27756,6 +27802,8 @@ def __init__( :paramtype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo :keyword integration_runtime: The integration runtime reference. :paramtype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword continuation_settings: Continuation settings for execute data flow activity. + :paramtype continuation_settings: ~azure.mgmt.datafactory.models.ContinuationSettingsReference :keyword compute: Compute properties for data flow activity. :paramtype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :keyword trace_level: Trace level setting used for data flow monitoring output. Supported @@ -27788,6 +27836,7 @@ def __init__( self.data_flow = data_flow self.staging = staging self.integration_runtime = integration_runtime + self.continuation_settings = continuation_settings self.compute = compute self.trace_level = trace_level self.continue_on_error = continue_on_error @@ -27806,6 +27855,8 @@ class ExecuteDataFlowActivityTypeProperties(_serialization.Model): :vartype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo :ivar integration_runtime: The integration runtime reference. :vartype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar continuation_settings: Continuation settings for execute data flow activity. + :vartype continuation_settings: ~azure.mgmt.datafactory.models.ContinuationSettingsReference :ivar compute: Compute properties for data flow activity. :vartype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :ivar trace_level: Trace level setting used for data flow monitoring output. 
Supported values @@ -27831,6 +27882,7 @@ class ExecuteDataFlowActivityTypeProperties(_serialization.Model): "data_flow": {"key": "dataFlow", "type": "DataFlowReference"}, "staging": {"key": "staging", "type": "DataFlowStagingInfo"}, "integration_runtime": {"key": "integrationRuntime", "type": "IntegrationRuntimeReference"}, + "continuation_settings": {"key": "continuationSettings", "type": "ContinuationSettingsReference"}, "compute": {"key": "compute", "type": "ExecuteDataFlowActivityTypePropertiesCompute"}, "trace_level": {"key": "traceLevel", "type": "object"}, "continue_on_error": {"key": "continueOnError", "type": "object"}, @@ -27844,6 +27896,7 @@ def __init__( data_flow: "_models.DataFlowReference", staging: Optional["_models.DataFlowStagingInfo"] = None, integration_runtime: Optional["_models.IntegrationRuntimeReference"] = None, + continuation_settings: Optional["_models.ContinuationSettingsReference"] = None, compute: Optional["_models.ExecuteDataFlowActivityTypePropertiesCompute"] = None, trace_level: Optional[JSON] = None, continue_on_error: Optional[JSON] = None, @@ -27858,6 +27911,8 @@ def __init__( :paramtype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo :keyword integration_runtime: The integration runtime reference. :paramtype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword continuation_settings: Continuation settings for execute data flow activity. + :paramtype continuation_settings: ~azure.mgmt.datafactory.models.ContinuationSettingsReference :keyword compute: Compute properties for data flow activity. :paramtype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :keyword trace_level: Trace level setting used for data flow monitoring output. Supported @@ -27878,6 +27933,7 @@ def __init__( self.data_flow = data_flow self.staging = staging self.integration_runtime = integration_runtime + self.continuation_settings = continuation_settings self.compute = compute self.trace_level = trace_level self.continue_on_error = continue_on_error @@ -28075,7 +28131,9 @@ def __init__( self.secure_input = secure_input -class ExecutePowerQueryActivityTypeProperties(ExecuteDataFlowActivityTypeProperties): +class ExecutePowerQueryActivityTypeProperties( + ExecuteDataFlowActivityTypeProperties +): # pylint: disable=too-many-instance-attributes """Execute power query data flow activity properties. All required parameters must be populated in order to send to server. @@ -28086,6 +28144,8 @@ class ExecutePowerQueryActivityTypeProperties(ExecuteDataFlowActivityTypePropert :vartype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo :ivar integration_runtime: The integration runtime reference. :vartype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar continuation_settings: Continuation settings for execute data flow activity. + :vartype continuation_settings: ~azure.mgmt.datafactory.models.ContinuationSettingsReference :ivar compute: Compute properties for data flow activity. :vartype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :ivar trace_level: Trace level setting used for data flow monitoring output. 
Supported values @@ -28116,6 +28176,7 @@ class ExecutePowerQueryActivityTypeProperties(ExecuteDataFlowActivityTypePropert "data_flow": {"key": "dataFlow", "type": "DataFlowReference"}, "staging": {"key": "staging", "type": "DataFlowStagingInfo"}, "integration_runtime": {"key": "integrationRuntime", "type": "IntegrationRuntimeReference"}, + "continuation_settings": {"key": "continuationSettings", "type": "ContinuationSettingsReference"}, "compute": {"key": "compute", "type": "ExecuteDataFlowActivityTypePropertiesCompute"}, "trace_level": {"key": "traceLevel", "type": "object"}, "continue_on_error": {"key": "continueOnError", "type": "object"}, @@ -28131,6 +28192,7 @@ def __init__( data_flow: "_models.DataFlowReference", staging: Optional["_models.DataFlowStagingInfo"] = None, integration_runtime: Optional["_models.IntegrationRuntimeReference"] = None, + continuation_settings: Optional["_models.ContinuationSettingsReference"] = None, compute: Optional["_models.ExecuteDataFlowActivityTypePropertiesCompute"] = None, trace_level: Optional[JSON] = None, continue_on_error: Optional[JSON] = None, @@ -28147,6 +28209,8 @@ def __init__( :paramtype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo :keyword integration_runtime: The integration runtime reference. :paramtype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword continuation_settings: Continuation settings for execute data flow activity. + :paramtype continuation_settings: ~azure.mgmt.datafactory.models.ContinuationSettingsReference :keyword compute: Compute properties for data flow activity. :paramtype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :keyword trace_level: Trace level setting used for data flow monitoring output. Supported @@ -28172,6 +28236,7 @@ def __init__( data_flow=data_flow, staging=staging, integration_runtime=integration_runtime, + continuation_settings=continuation_settings, compute=compute, trace_level=trace_level, continue_on_error=continue_on_error, @@ -28424,6 +28489,8 @@ class ExecuteWranglingDataflowActivity(Activity): # pylint: disable=too-many-in :vartype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo :ivar integration_runtime: The integration runtime reference. :vartype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar continuation_settings: Continuation settings for execute data flow activity. + :vartype continuation_settings: ~azure.mgmt.datafactory.models.ContinuationSettingsReference :ivar compute: Compute properties for data flow activity. :vartype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :ivar trace_level: Trace level setting used for data flow monitoring output. 
Supported values @@ -28465,6 +28532,10 @@ class ExecuteWranglingDataflowActivity(Activity): # pylint: disable=too-many-in "data_flow": {"key": "typeProperties.dataFlow", "type": "DataFlowReference"}, "staging": {"key": "typeProperties.staging", "type": "DataFlowStagingInfo"}, "integration_runtime": {"key": "typeProperties.integrationRuntime", "type": "IntegrationRuntimeReference"}, + "continuation_settings": { + "key": "typeProperties.continuationSettings", + "type": "ContinuationSettingsReference", + }, "compute": {"key": "typeProperties.compute", "type": "ExecuteDataFlowActivityTypePropertiesCompute"}, "trace_level": {"key": "typeProperties.traceLevel", "type": "object"}, "continue_on_error": {"key": "typeProperties.continueOnError", "type": "object"}, @@ -28488,6 +28559,7 @@ def __init__( self, *, name: str, policy: Optional["_models.ActivityPolicy"] = None, staging: Optional["_models.DataFlowStagingInfo"] = None, integration_runtime: Optional["_models.IntegrationRuntimeReference"] = None, + continuation_settings: Optional["_models.ContinuationSettingsReference"] = None, compute: Optional["_models.ExecuteDataFlowActivityTypePropertiesCompute"] = None, trace_level: Optional[JSON] = None, continue_on_error: Optional[JSON] = None, @@ -28524,6 +28596,8 @@ def __init__( :paramtype staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo :keyword integration_runtime: The integration runtime reference. :paramtype integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword continuation_settings: Continuation settings for execute data flow activity. + :paramtype continuation_settings: ~azure.mgmt.datafactory.models.ContinuationSettingsReference :keyword compute: Compute properties for data flow activity. :paramtype compute: ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute :keyword trace_level: Trace level setting used for data flow monitoring output. Supported @@ -28560,6 +28634,7 @@ def __init__( self.data_flow = data_flow self.staging = staging self.integration_runtime = integration_runtime + self.continuation_settings = continuation_settings self.compute = compute self.trace_level = trace_level self.continue_on_error = continue_on_error @@ -31219,7 +31294,7 @@ class GlobalParameterSpecification(_serialization.Model): All required parameters must be populated in order to send to server. :ivar type: Global Parameter type. Required. Known values are: "Object", "String", "Int", "Float", "Bool", and "Array". :vartype type: str or ~azure.mgmt.datafactory.models.GlobalParameterType :ivar value: Value of parameter. Required. :vartype value: JSON @@ -31238,7 +31313,7 @@ def __init__(self, *, type: Union[str, "_models.GlobalParameterType"], value: JSON, **kwargs: Any) -> None: """ :keyword type: Global Parameter type. Required. Known values are: "Object", "String", "Int", "Float", "Bool", and "Array". :paramtype type: str or ~azure.mgmt.datafactory.models.GlobalParameterType :keyword value: Value of parameter. Required. :paramtype value: JSON @@ -45721,7 +45796,8 @@ class NotebookParameter(_serialization.Model): :ivar value: Notebook parameter value. Type: string (or Expression with resultType string). :vartype value: JSON :ivar type: Notebook parameter type. Known values are: "string", "int", "float", and "bool".
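For reference, a `GlobalParameterSpecification` is just a typed value pair whose `type` must be one of the known values listed above. A minimal sketch; attaching the dict to a factory's global parameters happens through the wider API surface, not in this hunk:

```python
from azure.mgmt.datafactory import models as _models

# Two global parameters, typed per the known-values list in the docstring.
global_params = {
    "environment": _models.GlobalParameterSpecification(type="String", value="prod"),
    "retryCount": _models.GlobalParameterSpecification(type="Int", value=3),
}
```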
:vartype type: str or ~azure.mgmt.datafactory.models.NotebookParameterType """ @@ -45740,7 +45816,8 @@ def __init__( """ :keyword value: Notebook parameter value. Type: string (or Expression with resultType string). :paramtype value: JSON - :keyword type: Notebook parameter type. Known values are: "string", "int", "float", and "bool". + :keyword type: Notebook parameter type. Known values are: "string", "int", "float", "bool", and + "int". :paramtype type: str or ~azure.mgmt.datafactory.models.NotebookParameterType """ super().__init__(**kwargs) @@ -48930,7 +49007,7 @@ class ParameterSpecification(_serialization.Model): All required parameters must be populated in order to send to server. :ivar type: Parameter type. Required. Known values are: "Object", "String", "Int", "Float", - "Bool", "Array", and "SecureString". + "Bool", "Array", "SecureString", and "Int". :vartype type: str or ~azure.mgmt.datafactory.models.ParameterType :ivar default_value: Default value of parameter. :vartype default_value: JSON @@ -48950,7 +49027,7 @@ def __init__( ) -> None: """ :keyword type: Parameter type. Required. Known values are: "Object", "String", "Int", "Float", - "Bool", "Array", and "SecureString". + "Bool", "Array", "SecureString", and "Int". :paramtype type: str or ~azure.mgmt.datafactory.models.ParameterType :keyword default_value: Default value of parameter. :paramtype default_value: JSON @@ -54523,7 +54600,7 @@ class RunQueryFilter(_serialization.Model): "ActivityType", "TriggerName", "TriggerRunTimestamp", "RunGroupId", and "LatestOnly". :vartype operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand :ivar operator: Operator to be used for filter. Required. Known values are: "Equals", - "NotEquals", "In", and "NotIn". + "NotEquals", "In", "NotIn", and "In". :vartype operator: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperator :ivar values: List of filter values. Required. :vartype values: list[str] @@ -54558,7 +54635,7 @@ def __init__( "ActivityType", "TriggerName", "TriggerRunTimestamp", "RunGroupId", and "LatestOnly". :paramtype operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand :keyword operator: Operator to be used for filter. Required. Known values are: "Equals", - "NotEquals", "In", and "NotIn". + "NotEquals", "In", "NotIn", and "In". :paramtype operator: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperator :keyword values: List of filter values. Required. :paramtype values: list[str] @@ -62952,6 +63029,9 @@ class SnowflakeExportCopyCommand(ExportSettings): object). Example: "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'" }. :vartype additional_format_options: dict[str, JSON] + :ivar storage_integration: The name of the snowflake storage integration to use for the copy + operation. Type: string (or Expression with resultType string). 
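A `RunQueryFilter` combines an operand, one of the operators listed above, and a list of values. A sketch of a pipeline-run query filter; the `RunFilterParameters` wrapper and the `"PipelineName"` operand come from the wider SDK surface rather than this hunk, so treat them as assumptions:

```python
from datetime import datetime, timedelta, timezone

from azure.mgmt.datafactory import models as _models

now = datetime.now(timezone.utc)
run_filter = _models.RunFilterParameters(
    last_updated_after=now - timedelta(days=1),
    last_updated_before=now,
    filters=[
        _models.RunQueryFilter(
            operand="PipelineName",  # assumed operand value
            operator="Equals",
            values=["daily_copy"],
        )
    ],
)
```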
+ :vartype storage_integration: JSON """ _validation = { @@ -62963,6 +63043,7 @@ class SnowflakeExportCopyCommand(ExportSettings): "type": {"key": "type", "type": "str"}, "additional_copy_options": {"key": "additionalCopyOptions", "type": "{object}"}, "additional_format_options": {"key": "additionalFormatOptions", "type": "{object}"}, + "storage_integration": {"key": "storageIntegration", "type": "object"}, } def __init__( @@ -62971,6 +63052,7 @@ def __init__( additional_properties: Optional[Dict[str, JSON]] = None, additional_copy_options: Optional[Dict[str, JSON]] = None, additional_format_options: Optional[Dict[str, JSON]] = None, + storage_integration: Optional[JSON] = None, **kwargs: Any ) -> None: """ @@ -62987,11 +63069,15 @@ def __init__( object). Example: "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'" }. :paramtype additional_format_options: dict[str, JSON] + :keyword storage_integration: The name of the snowflake storage integration to use for the copy + operation. Type: string (or Expression with resultType string). + :paramtype storage_integration: JSON """ super().__init__(additional_properties=additional_properties, **kwargs) self.type: str = "SnowflakeExportCopyCommand" self.additional_copy_options = additional_copy_options self.additional_format_options = additional_format_options + self.storage_integration = storage_integration class SnowflakeImportCopyCommand(ImportSettings): @@ -63014,6 +63100,9 @@ class SnowflakeImportCopyCommand(ImportSettings): object). Example: "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": "'FALSE'" }. :vartype additional_format_options: dict[str, JSON] + :ivar storage_integration: The name of the snowflake storage integration to use for the copy + operation. Type: string (or Expression with resultType string). + :vartype storage_integration: JSON """ _validation = { @@ -63025,6 +63114,7 @@ class SnowflakeImportCopyCommand(ImportSettings): "type": {"key": "type", "type": "str"}, "additional_copy_options": {"key": "additionalCopyOptions", "type": "{object}"}, "additional_format_options": {"key": "additionalFormatOptions", "type": "{object}"}, + "storage_integration": {"key": "storageIntegration", "type": "object"}, } def __init__( @@ -63033,6 +63123,7 @@ def __init__( additional_properties: Optional[Dict[str, JSON]] = None, additional_copy_options: Optional[Dict[str, JSON]] = None, additional_format_options: Optional[Dict[str, JSON]] = None, + storage_integration: Optional[JSON] = None, **kwargs: Any ) -> None: """ @@ -63049,11 +63140,15 @@ def __init__( object). Example: "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": "'FALSE'" }. :paramtype additional_format_options: dict[str, JSON] + :keyword storage_integration: The name of the snowflake storage integration to use for the copy + operation. Type: string (or Expression with resultType string). + :paramtype storage_integration: JSON """ super().__init__(additional_properties=additional_properties, **kwargs) self.type: str = "SnowflakeImportCopyCommand" self.additional_copy_options = additional_copy_options self.additional_format_options = additional_format_options + self.storage_integration = storage_integration class SnowflakeLinkedService(LinkedService): @@ -65198,7 +65293,7 @@ class SqlServerLinkedService(LinkedService): # pylint: disable=too-many-instanc AzureKeyVaultSecretReference. :vartype connection_string: JSON :ivar authentication_type: The type used for authentication. Type: string. 
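Both Snowflake copy-command settings objects gain the same `storage_integration` property. Because it is typed as JSON, it can carry either a literal integration name or an ADF expression object; the names below are illustrative:

```python
from azure.mgmt.datafactory import models as _models

export_settings = _models.SnowflakeExportCopyCommand(
    storage_integration="AZURE_BLOB_INTEGRATION",  # illustrative integration name
)
import_settings = _models.SnowflakeImportCopyCommand(
    # The JSON typing also admits an expression resolved per pipeline run.
    storage_integration={
        "value": "@pipeline().parameters.storageIntegration",
        "type": "Expression",
    },
)
```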
Known values are: - "SQL" and "Windows". + "SQL", "Windows", and "UserAssignedManagedIdentity". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.SqlServerAuthenticationType :ivar user_name: The on-premises Windows authentication user name. Type: string (or Expression with resultType string). @@ -65210,6 +65305,8 @@ class SqlServerLinkedService(LinkedService): # pylint: disable=too-many-instanc :vartype encrypted_credential: str :ivar always_encrypted_settings: Sql always encrypted properties. :vartype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _validation = { @@ -65251,6 +65348,7 @@ class SqlServerLinkedService(LinkedService): # pylint: disable=too-many-instanc "key": "typeProperties.alwaysEncryptedSettings", "type": "SqlAlwaysEncryptedProperties", }, + "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, } def __init__( # pylint: disable=too-many-locals @@ -65286,6 +65384,7 @@ def __init__( # pylint: disable=too-many-locals password: Optional["_models.SecretBase"] = None, encrypted_credential: Optional[str] = None, always_encrypted_settings: Optional["_models.SqlAlwaysEncryptedProperties"] = None, + credential: Optional["_models.CredentialReference"] = None, **kwargs: Any ) -> None: """ @@ -65383,7 +65482,7 @@ def __init__( # pylint: disable=too-many-locals AzureKeyVaultSecretReference. :paramtype connection_string: JSON :keyword authentication_type: The type used for authentication. Type: string. Known values are: - "SQL" and "Windows". + "SQL", "Windows", and "UserAssignedManagedIdentity". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.SqlServerAuthenticationType :keyword user_name: The on-premises Windows authentication user name. Type: string (or @@ -65397,6 +65496,8 @@ def __init__( # pylint: disable=too-many-locals :keyword always_encrypted_settings: Sql always encrypted properties. :paramtype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ super().__init__( additional_properties=additional_properties, @@ -65432,6 +65533,7 @@ def __init__( # pylint: disable=too-many-locals self.password = password self.encrypted_credential = encrypted_credential self.always_encrypted_settings = always_encrypted_settings + self.credential = credential class SqlServerLinkedServiceTypeProperties( @@ -65522,7 +65624,7 @@ class SqlServerLinkedServiceTypeProperties( AzureKeyVaultSecretReference. :vartype connection_string: JSON :ivar authentication_type: The type used for authentication. Type: string. Known values are: - "SQL" and "Windows". + "SQL", "Windows", and "UserAssignedManagedIdentity". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.SqlServerAuthenticationType :ivar user_name: The on-premises Windows authentication user name. Type: string (or Expression with resultType string). @@ -65534,6 +65636,8 @@ class SqlServerLinkedServiceTypeProperties( :vartype encrypted_credential: str :ivar always_encrypted_settings: Sql always encrypted properties. 
:vartype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :ivar credential: The credential reference containing authentication information. + :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ _attribute_map = { @@ -65562,6 +65666,7 @@ class SqlServerLinkedServiceTypeProperties( "password": {"key": "password", "type": "SecretBase"}, "encrypted_credential": {"key": "encryptedCredential", "type": "str"}, "always_encrypted_settings": {"key": "alwaysEncryptedSettings", "type": "SqlAlwaysEncryptedProperties"}, + "credential": {"key": "credential", "type": "CredentialReference"}, } def __init__( # pylint: disable=too-many-locals @@ -65592,6 +65697,7 @@ def __init__( # pylint: disable=too-many-locals password: Optional["_models.SecretBase"] = None, encrypted_credential: Optional[str] = None, always_encrypted_settings: Optional["_models.SqlAlwaysEncryptedProperties"] = None, + credential: Optional["_models.CredentialReference"] = None, **kwargs: Any ) -> None: """ @@ -65678,7 +65784,7 @@ def __init__( # pylint: disable=too-many-locals AzureKeyVaultSecretReference. :paramtype connection_string: JSON :keyword authentication_type: The type used for authentication. Type: string. Known values are: - "SQL" and "Windows". + "SQL", "Windows", and "UserAssignedManagedIdentity". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.SqlServerAuthenticationType :keyword user_name: The on-premises Windows authentication user name. Type: string (or @@ -65692,6 +65798,8 @@ def __init__( # pylint: disable=too-many-locals :keyword always_encrypted_settings: Sql always encrypted properties. :paramtype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties + :keyword credential: The credential reference containing authentication information. + :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ super().__init__( server=server, @@ -65721,6 +65829,7 @@ def __init__( # pylint: disable=too-many-locals self.password = password self.encrypted_credential = encrypted_credential self.always_encrypted_settings = always_encrypted_settings + self.credential = credential class SqlServerSink(CopySink): # pylint: disable=too-many-instance-attributes @@ -68056,7 +68165,7 @@ class StoredProcedureParameter(_serialization.Model): string). :vartype value: JSON :ivar type: Stored procedure parameter type. Known values are: "String", "Int", "Int64", "Decimal", "Guid", "Boolean", and "Date". :vartype type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType """ @@ -68077,7 +68186,7 @@ def __init__( string). :paramtype value: JSON :keyword type: Stored procedure parameter type. Known values are: "String", "Int", "Int64", "Decimal", "Guid", "Boolean", and "Date". :paramtype type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType """ super().__init__(**kwargs) @@ -71151,7 +71260,7 @@ def __init__( self.default_value = default_value -class VerticaLinkedService(LinkedService): +class VerticaLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Vertica linked service. All required parameters must be populated in order to send to server. @@ -71172,6 +71281,14 @@ class VerticaLinkedService(LinkedService): :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference.
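Taken together with the new `"UserAssignedManagedIdentity"` authentication type, the `credential` reference lets a SQL Server linked service authenticate through a factory credential. A minimal sketch; the credential name is illustrative and must match an existing credential resource in the factory:

```python
from azure.mgmt.datafactory import models as _models

sql_ls = _models.SqlServerLinkedService(
    connection_string="Server=myserver.example.com;Database=mydb;",
    authentication_type="UserAssignedManagedIdentity",
    credential=_models.CredentialReference(reference_name="my-uami-credential"),
)
```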
:vartype connection_string: JSON + :ivar server: Server name for connection. Type: string. + :vartype server: JSON + :ivar port: The port for the connection. Type: integer. + :vartype port: JSON + :ivar uid: Username for authentication. Type: string. + :vartype uid: JSON + :ivar database: Database name for connection. Type: string. + :vartype database: JSON :ivar pwd: The Azure key vault secret reference of password in connection string. :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are @@ -71191,6 +71308,10 @@ class VerticaLinkedService(LinkedService): "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, "annotations": {"key": "annotations", "type": "[object]"}, "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "server": {"key": "typeProperties.server", "type": "object"}, + "port": {"key": "typeProperties.port", "type": "object"}, + "uid": {"key": "typeProperties.uid", "type": "object"}, + "database": {"key": "typeProperties.database", "type": "object"}, "pwd": {"key": "typeProperties.pwd", "type": "AzureKeyVaultSecretReference"}, "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } @@ -71204,6 +71325,10 @@ def __init__( parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, connection_string: Optional[JSON] = None, + server: Optional[JSON] = None, + port: Optional[JSON] = None, + uid: Optional[JSON] = None, + database: Optional[JSON] = None, pwd: Optional["_models.AzureKeyVaultSecretReference"] = None, encrypted_credential: Optional[str] = None, **kwargs: Any @@ -71223,6 +71348,14 @@ def __init__( :keyword connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :paramtype connection_string: JSON + :keyword server: Server name for connection. Type: string. + :paramtype server: JSON + :keyword port: The port for the connection. Type: integer. + :paramtype port: JSON + :keyword uid: Username for authentication. Type: string. + :paramtype uid: JSON + :keyword database: Database name for connection. Type: string. + :paramtype database: JSON :keyword pwd: The Azure key vault secret reference of password in connection string. :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. 
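The keyword documentation and assignments for these Vertica properties continue below. As a usage sketch, the discrete `server`/`port`/`uid`/`database` fields can now stand in for a raw connection string, with the password still sourced from Key Vault (host and secret names are illustrative):

```python
from azure.mgmt.datafactory import models as _models

vertica_ls = _models.VerticaLinkedService(
    server="vertica.example.com",
    port=5433,
    uid="loader",
    database="warehouse",
    pwd=_models.AzureKeyVaultSecretReference(
        store=_models.LinkedServiceReference(
            type="LinkedServiceReference", reference_name="MyKeyVault"
        ),
        secret_name="vertica-password",
    ),
)
```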
Credentials @@ -71239,6 +71372,10 @@ def __init__( ) self.type: str = "Vertica" self.connection_string = connection_string + self.server = server + self.port = port + self.uid = uid + self.database = database self.pwd = pwd self.encrypted_credential = encrypted_credential diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_activity_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_activity_runs_operations.py index d3afb059e48b1..12464e49f4842 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_activity_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_activity_runs_operations.py @@ -19,15 +19,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -220,7 +218,6 @@ def query_by_pipeline_run( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -234,7 +231,7 @@ def query_by_pipeline_run( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ActivityRunsQueryResponse", pipeline_response) + deserialized = self._deserialize("ActivityRunsQueryResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_change_data_capture_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_change_data_capture_operations.py index eea39471fce31..e7ffb77afaaab 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_change_data_capture_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_change_data_capture_operations.py @@ -21,15 +21,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -448,7 +446,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -464,7 +461,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -627,7 +623,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -641,7 +636,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ChangeDataCaptureResource", pipeline_response) + deserialized = self._deserialize("ChangeDataCaptureResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -697,7 +692,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -711,7 +705,7 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ChangeDataCaptureResource", pipeline_response) + deserialized = self._deserialize("ChangeDataCaptureResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -757,7 +751,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -813,7 +806,6 @@ def start( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -869,7 +861,6 @@ def stop( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -923,7 +914,6 @@ def status(self, resource_group_name: str, factory_name: str, change_data_captur headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -937,7 +927,7 @@ def status(self, resource_group_name: str, factory_name: str, change_data_captur map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("str", pipeline_response) + deserialized = self._deserialize("str", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_credential_operations_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_credential_operations_operations.py index 
5ec7d70c2a12d..adbba32f483bc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_credential_operations_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_credential_operations_operations.py @@ -21,15 +21,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -303,7 +301,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -319,7 +316,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -478,7 +474,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -492,7 +487,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("CredentialResource", pipeline_response) + deserialized = self._deserialize("CredentialResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -548,7 +543,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -564,7 +558,7 @@ def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("CredentialResource", pipeline_response) + deserialized = self._deserialize("CredentialResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -610,7 +604,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py index 879a98f2b5c09..75a190d38db29 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, 
TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -21,9 +21,8 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -31,7 +30,6 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -281,7 +279,7 @@ def _create_initial( factory_name: str, request: Union[_models.CreateDataFlowDebugSessionRequest, IO[bytes]], **kwargs: Any - ) -> Optional[_models.CreateDataFlowDebugSessionResponse]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -295,7 +293,7 @@ def _create_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.CreateDataFlowDebugSessionResponse]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -316,10 +314,9 @@ def _create_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -327,17 +324,19 @@ def _create_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize("CreateDataFlowDebugSessionResponse", pipeline_response) + deserialized = response.stream_download(self._client._pipeline) if response.status_code == 202: response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -443,10 +442,11 @@ def begin_create( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("CreateDataFlowDebugSessionResponse", pipeline_response) + deserialized = self._deserialize("CreateDataFlowDebugSessionResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -508,7 +508,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -524,7 +523,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", 
urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -660,7 +658,6 @@ def add_data_flow( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -674,7 +671,7 @@ def add_data_flow( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("AddDataFlowToDebugSessionResponse", pipeline_response) + deserialized = self._deserialize("AddDataFlowToDebugSessionResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -788,7 +785,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -811,7 +807,7 @@ def _execute_command_initial( factory_name: str, request: Union[_models.DataFlowDebugCommandRequest, IO[bytes]], **kwargs: Any - ) -> Optional[_models.DataFlowDebugCommandResponse]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -825,7 +821,7 @@ def _execute_command_initial( api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.DataFlowDebugCommandResponse]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None @@ -846,10 +842,9 @@ def _execute_command_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -857,17 +852,19 @@ def _execute_command_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize("DataFlowDebugCommandResponse", pipeline_response) + deserialized = response.stream_download(self._client._pipeline) if response.status_code == 202: response_headers["location"] = self._deserialize("str", response.headers.get("location")) + deserialized = response.stream_download(self._client._pipeline) + if cls: return cls(pipeline_response, deserialized, response_headers) # type: ignore @@ -973,10 +970,11 @@ def begin_execute_command( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("DataFlowDebugCommandResponse", pipeline_response) + deserialized = self._deserialize("DataFlowDebugCommandResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # 
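Although `_create_initial` and `_execute_command_initial` now stream the raw response (`_stream = True`, `stream_download`), the public pollers still deserialize the final payload from `pipeline_response.http_response`, so caller-facing behavior is unchanged. A sketch, assuming `client` is an existing `DataFactoryManagementClient` and the resource names are illustrative:

```python
from azure.mgmt.datafactory import models as _models

# begin_create still returns an LROPoller over the typed response model.
poller = client.data_flow_debug_session.begin_create(
    resource_group_name="my-rg",
    factory_name="my-factory",
    request=_models.CreateDataFlowDebugSessionRequest(time_to_live=60),
)
session = poller.result()  # CreateDataFlowDebugSessionResponse
print(session.session_id)
```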
type: ignore return deserialized diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flows_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flows_operations.py index 7203524250bbe..062bf07233672 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flows_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flows_operations.py @@ -21,15 +21,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -394,7 +392,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -408,7 +405,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DataFlowResource", pipeline_response) + deserialized = self._deserialize("DataFlowResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -464,7 +461,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -478,7 +474,7 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DataFlowResource", pipeline_response) + deserialized = self._deserialize("DataFlowResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -524,7 +520,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -580,7 +575,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -596,7 +590,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_datasets_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_datasets_operations.py index e66e71189f370..62880ce2cad6f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_datasets_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_datasets_operations.py @@ -21,15 +21,13 @@ ) from azure.core.paging 
import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -303,7 +301,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -319,7 +316,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -478,7 +474,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -492,7 +487,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("DatasetResource", pipeline_response) + deserialized = self._deserialize("DatasetResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -548,7 +543,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -564,7 +558,7 @@ def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("DatasetResource", pipeline_response) + deserialized = self._deserialize("DatasetResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -610,7 +604,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_exposure_control_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_exposure_control_operations.py index 7eee946d5922b..94d33618346b5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_exposure_control_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_exposure_control_operations.py @@ -19,15 +19,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -274,7 +272,6 @@ def get_feature_value( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -288,7 +285,7 @@ def get_feature_value( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ExposureControlResponse", pipeline_response) + deserialized = self._deserialize("ExposureControlResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -403,7 +400,6 @@ def get_feature_value_by_factory( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -417,7 +413,7 @@ def get_feature_value_by_factory( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ExposureControlResponse", pipeline_response) + deserialized = self._deserialize("ExposureControlResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -535,7 +531,6 @@ def query_feature_values_by_factory( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -549,7 +544,7 @@ def query_feature_values_by_factory( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ExposureControlBatchResponse", pipeline_response) + deserialized = self._deserialize("ExposureControlBatchResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py index b3e6a1adeab79..8854b1aced51f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py @@ -21,15 +21,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -438,7 +436,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -454,7 +451,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -572,7 +568,6 @@ def configure_factory_repo( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -586,7 +581,7 @@ def configure_factory_repo( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("Factory", pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -627,7 +622,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -643,7 +637,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -792,7 +785,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -806,7 +798,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("Factory", pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -921,7 +913,6 @@ def update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -935,7 +926,7 @@ def update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("Factory", pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -983,7 +974,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -999,7 +989,7 @@ def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("Factory", pipeline_response) + deserialized = self._deserialize("Factory", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1042,7 +1032,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, 
params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1167,7 +1156,6 @@ def get_git_hub_access_token( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1181,7 +1169,7 @@ def get_git_hub_access_token( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("GitHubAccessTokenResponse", pipeline_response) + deserialized = self._deserialize("GitHubAccessTokenResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1295,7 +1283,6 @@ def get_data_plane_access( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1309,7 +1296,7 @@ def get_data_plane_access( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("AccessPolicyResponse", pipeline_response) + deserialized = self._deserialize("AccessPolicyResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_global_parameters_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_global_parameters_operations.py index b2eaa48001d42..e68dbb25f6f87 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_global_parameters_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_global_parameters_operations.py @@ -21,15 +21,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -288,7 +286,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -304,7 +301,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -372,7 +368,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -386,7 +381,7 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("GlobalParameterResource", pipeline_response) + deserialized = self._deserialize("GlobalParameterResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -510,7 +505,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -524,7 +518,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("GlobalParameterResource", pipeline_response) + deserialized = self._deserialize("GlobalParameterResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -570,7 +564,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_nodes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_nodes_operations.py index 6514255e5e815..a925184863fb9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_nodes_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_nodes_operations.py @@ -19,15 +19,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -328,7 +326,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -342,7 +339,7 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SelfHostedIntegrationRuntimeNode", pipeline_response) + deserialized = self._deserialize("SelfHostedIntegrationRuntimeNode", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -391,7 +388,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -540,7 +536,6 @@ def update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -554,7 +549,7 @@ def update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SelfHostedIntegrationRuntimeNode", pipeline_response) + deserialized = self._deserialize("SelfHostedIntegrationRuntimeNode", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -603,7 +598,6 @@ def get_ip_address( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -617,7 +611,7 @@ def get_ip_address( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeNodeIpAddress", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeNodeIpAddress", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py index da07d23ee07c2..1fc29bcb90176 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterator, Optional, Type, TypeVar, Union, cast, overload from azure.core.exceptions import ( ClientAuthenticationError, @@ -19,9 +19,8 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from 
azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -29,7 +28,6 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -162,7 +160,7 @@ def __init__(self, *args, **kwargs): def _refresh_initial( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any - ) -> Optional[_models.SsisObjectMetadataStatusResponse]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -175,7 +173,7 @@ def _refresh_initial( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.SsisObjectMetadataStatusResponse]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_refresh_request( resource_group_name=resource_group_name, @@ -186,10 +184,9 @@ def _refresh_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -197,12 +194,15 @@ def _refresh_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None if response.status_code == 200: - deserialized = self._deserialize("SsisObjectMetadataStatusResponse", pipeline_response) + deserialized = response.stream_download(self._client._pipeline) + + if response.status_code == 202: + deserialized = response.stream_download(self._client._pipeline) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -246,10 +246,11 @@ def begin_refresh( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("SsisObjectMetadataStatusResponse", pipeline_response) + deserialized = self._deserialize("SsisObjectMetadataStatusResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -397,7 +398,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -411,7 +411,7 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("SsisObjectMetadataListResponse", pipeline_response) + deserialized = self._deserialize("SsisObjectMetadataListResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py index a697acdc95b6d..830d6aeaefaf8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -21,9 +21,8 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -31,7 +30,6 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -942,7 +940,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -958,7 +955,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -1121,7 +1117,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1135,7 +1130,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1191,7 +1186,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1207,7 +1201,7 @@ def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1335,7 +1329,6 @@ def update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1349,7 +1342,7 @@ def update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = 
self._deserialize("IntegrationRuntimeResource", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1395,7 +1388,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1451,7 +1443,6 @@ def get_status( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1465,7 +1456,7 @@ def get_status( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1513,7 +1504,6 @@ def list_outbound_network_dependencies_endpoints( # pylint: disable=name-too-lo headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1528,7 +1518,7 @@ def list_outbound_network_dependencies_endpoints( # pylint: disable=name-too-lo raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize( - "IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse", pipeline_response + "IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse", pipeline_response.http_response ) if cls: @@ -1576,7 +1566,6 @@ def get_connection_info( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1590,7 +1579,7 @@ def get_connection_info( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeConnectionInfo", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeConnectionInfo", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1719,7 +1708,6 @@ def regenerate_auth_key( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1733,7 +1721,7 @@ def regenerate_auth_key( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeAuthKeys", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeAuthKeys", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1779,7 +1767,6 @@ def list_auth_keys( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1793,7 +1780,7 @@ def list_auth_keys( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeAuthKeys", pipeline_response) + deserialized = 
self._deserialize("IntegrationRuntimeAuthKeys", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1802,7 +1789,7 @@ def list_auth_keys( def _start_initial( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any - ) -> Optional[_models.IntegrationRuntimeStatusResponse]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1815,7 +1802,7 @@ def _start_initial( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.IntegrationRuntimeStatusResponse]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_start_request( resource_group_name=resource_group_name, @@ -1826,10 +1813,9 @@ def _start_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1837,12 +1823,15 @@ def _start_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None if response.status_code == 200: - deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) + deserialized = response.stream_download(self._client._pipeline) + + if response.status_code == 202: + deserialized = response.stream_download(self._client._pipeline) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1886,10 +1875,11 @@ def begin_start( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1911,9 +1901,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _stop_initial( # pylint: disable=inconsistent-return-statements + def _stop_initial( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1926,7 +1916,7 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_stop_request( resource_group_name=resource_group_name, @@ -1937,10 +1927,9 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) 
- _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1948,11 +1937,20 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200, 202]: + response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + if response.status_code == 200: + deserialized = response.stream_download(self._client._pipeline) + + if response.status_code == 202: + deserialized = response.stream_download(self._client._pipeline) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_stop( @@ -1979,7 +1977,7 @@ def begin_stop( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._stop_initial( # type: ignore + raw_result = self._stop_initial( resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, @@ -1989,6 +1987,7 @@ def begin_stop( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -2052,7 +2051,6 @@ def sync_credentials( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2109,7 +2107,6 @@ def get_monitoring_data( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2123,7 +2120,7 @@ def get_monitoring_data( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("IntegrationRuntimeMonitoringData", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeMonitoringData", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2169,7 +2166,6 @@ def upgrade( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2310,7 +2306,6 @@ def remove_links( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2450,7 +2445,6 @@ def create_linked_integration_runtime( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -2464,7 +2458,7 @@ def create_linked_integration_runtime( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = 
self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response) + deserialized = self._deserialize("IntegrationRuntimeStatusResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_linked_services_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_linked_services_operations.py index 336723385af27..63d7ce3107512 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_linked_services_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_linked_services_operations.py @@ -21,15 +21,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -304,7 +302,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -320,7 +317,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -482,7 +478,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -496,7 +491,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("LinkedServiceResource", pipeline_response) + deserialized = self._deserialize("LinkedServiceResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -552,7 +547,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -568,7 +562,7 @@ def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("LinkedServiceResource", pipeline_response) + deserialized = self._deserialize("LinkedServiceResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -614,7 +608,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_private_endpoints_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_private_endpoints_operations.py index d6d035e10c31b..20257def35883 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_private_endpoints_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_private_endpoints_operations.py @@ -21,15 +21,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -347,7 +345,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -363,7 +360,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -536,7 +532,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -550,7 +545,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ManagedPrivateEndpointResource", pipeline_response) + deserialized = self._deserialize("ManagedPrivateEndpointResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -610,7 +605,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -624,7 +618,7 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ManagedPrivateEndpointResource", pipeline_response) + deserialized = self._deserialize("ManagedPrivateEndpointResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -678,7 +672,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_virtual_networks_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_virtual_networks_operations.py index 8e8d3c925a40b..f13244070d734 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_virtual_networks_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_virtual_networks_operations.py @@ -21,15 +21,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest 
import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -257,7 +255,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -273,7 +270,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -436,7 +432,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -450,7 +445,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ManagedVirtualNetworkResource", pipeline_response) + deserialized = self._deserialize("ManagedVirtualNetworkResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -506,7 +501,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -520,7 +514,7 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("ManagedVirtualNetworkResource", pipeline_response) + deserialized = self._deserialize("ManagedVirtualNetworkResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_operations.py index 6cc126f9ab3d5..b6be3c7fe2493 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -109,7 +107,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -125,7 +122,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipeline_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipeline_runs_operations.py index 2a211e013a6eb..9fc1d2636bd2d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipeline_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipeline_runs_operations.py @@ -19,15 +19,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -299,7 +297,6 @@ def query_by_factory( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -313,7 +310,7 @@ def query_by_factory( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PipelineRunsQueryResponse", pipeline_response) + deserialized = self._deserialize("PipelineRunsQueryResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -357,7 +354,6 @@ def get(self, resource_group_name: str, factory_name: str, run_id: str, **kwargs headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -371,7 +367,7 @@ def get(self, resource_group_name: str, factory_name: str, run_id: str, **kwargs map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PipelineRun", pipeline_response) + deserialized = self._deserialize("PipelineRun", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -426,7 +422,6 @@ def cancel( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py index 
6d3f7ef8bae2d..573caa3617a6a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py @@ -21,15 +21,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -374,7 +372,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -390,7 +387,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -549,7 +545,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -563,7 +558,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PipelineResource", pipeline_response) + deserialized = self._deserialize("PipelineResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -619,7 +614,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -635,7 +629,7 @@ def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("PipelineResource", pipeline_response) + deserialized = self._deserialize("PipelineResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -681,7 +675,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -879,7 +872,6 @@ def create_run( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -893,7 +885,7 @@ def create_run( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("CreateRunResponse", pipeline_response) + deserialized = self._deserialize("CreateRunResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py index 
99924ff8aef82..3976f19d464bf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py @@ -20,15 +20,13 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -141,7 +139,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -157,7 +154,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py index 4ae9311014f3f..c731ff51c36d6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py @@ -19,15 +19,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -346,7 +344,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -360,7 +357,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PrivateEndpointConnectionResource", pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnectionResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -416,7 +413,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -430,7 +426,7 @@ def get( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PrivateEndpointConnectionResource", pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnectionResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -476,7 +472,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py index 69246e7ba86ba..02ddf24d59913 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py @@ -18,15 +18,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -130,7 +128,6 @@ def get(self, resource_group_name: str, factory_name: str, **kwargs: Any) -> _mo headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -144,7 +141,7 @@ def get(self, resource_group_name: str, factory_name: str, **kwargs: Any) -> _mo map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("PrivateLinkResourcesWrapper", pipeline_response) + deserialized = self._deserialize("PrivateLinkResourcesWrapper", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_trigger_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_trigger_runs_operations.py index b7287aa292e7b..885ad4224f886 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_trigger_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_trigger_runs_operations.py @@ -19,15 +19,13 @@ map_error, ) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -242,7 +240,6 @@ def rerun( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -301,7 +298,6 @@ def cancel( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -425,7 +421,6 @@ def query_by_factory( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -439,7 +434,7 @@ def query_by_factory( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("TriggerRunsQueryResponse", pipeline_response) + deserialized = self._deserialize("TriggerRunsQueryResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py index 34014519da017..ec5def64a3ced 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py @@ -8,7 +8,7 @@ # -------------------------------------------------------------------------- from io import IOBase import sys -from typing import Any, Callable, Dict, IO, Iterable, Optional, Type, TypeVar, Union, cast, overload +from typing import Any, Callable, Dict, IO, Iterable, Iterator, Optional, Type, TypeVar, Union, cast, overload import urllib.parse from azure.core.exceptions import ( @@ -21,9 +21,8 @@ ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.core.rest import HttpRequest +from azure.core.rest import HttpRequest, HttpResponse from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat @@ -31,7 +30,6 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping @@ -588,7 +586,6 @@ def prepare_request(next_link=None): headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) else: @@ -604,7 +601,6 @@ def prepare_request(next_link=None): _request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _request.method = "GET" return _request @@ -740,7 +736,6 @@ def query_by_factory( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -754,7 +749,7 @@ def query_by_factory( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("TriggerQueryResponse", pipeline_response) + deserialized = self._deserialize("TriggerQueryResponse", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -891,7 +886,6 @@ def create_or_update( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -905,7 +899,7 @@ def create_or_update( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("TriggerResource", pipeline_response) + deserialized = self._deserialize("TriggerResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -961,7 +955,6 @@ def get( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -977,7 +970,7 @@ def get( deserialized = None if response.status_code == 200: - deserialized = self._deserialize("TriggerResource", pipeline_response) + deserialized = self._deserialize("TriggerResource", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1023,7 +1016,6 @@ def delete( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1042,7 +1034,7 @@ def delete( # pylint: disable=inconsistent-return-statements def _subscribe_to_events_initial( self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any - ) -> Optional[_models.TriggerSubscriptionOperationStatus]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1055,7 +1047,7 @@ def _subscribe_to_events_initial( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.TriggerSubscriptionOperationStatus]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_subscribe_to_events_request( resource_group_name=resource_group_name, @@ -1066,10 +1058,9 @@ def 
_subscribe_to_events_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1077,12 +1068,15 @@ def _subscribe_to_events_initial( response = pipeline_response.http_response if response.status_code not in [200, 202]: + response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None if response.status_code == 200: - deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) + deserialized = response.stream_download(self._client._pipeline) + + if response.status_code == 202: + deserialized = response.stream_download(self._client._pipeline) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1126,10 +1120,11 @@ def begin_subscribe_to_events( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) + deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1190,7 +1185,6 @@ def get_event_subscription_status( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) _stream = False @@ -1204,7 +1198,7 @@ def get_event_subscription_status( map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) + deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1213,7 +1207,7 @@ def get_event_subscription_status( def _unsubscribe_from_events_initial( self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any - ) -> Optional[_models.TriggerSubscriptionOperationStatus]: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1226,7 +1220,7 @@ def _unsubscribe_from_events_initial( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[Optional[_models.TriggerSubscriptionOperationStatus]] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_unsubscribe_from_events_request( resource_group_name=resource_group_name, @@ -1237,10 +1231,9 @@ def _unsubscribe_from_events_initial( headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1248,12 +1241,15 @@ def _unsubscribe_from_events_initial( response = 
pipeline_response.http_response if response.status_code not in [200, 202]: + response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) - deserialized = None if response.status_code == 200: - deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) + deserialized = response.stream_download(self._client._pipeline) + + if response.status_code == 202: + deserialized = response.stream_download(self._client._pipeline) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1297,10 +1293,11 @@ def begin_unsubscribe_from_events( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response) + deserialized = self._deserialize("TriggerSubscriptionOperationStatus", pipeline_response.http_response) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1322,9 +1319,9 @@ def get_long_running_output(pipeline_response): self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - def _start_initial( # pylint: disable=inconsistent-return-statements + def _start_initial( self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1337,7 +1334,7 @@ def _start_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_start_request( resource_group_name=resource_group_name, @@ -1348,10 +1345,9 @@ def _start_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1359,11 +1355,16 @@ def _start_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200]: + response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_start( @@ -1390,7 +1391,7 @@ def begin_start( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._start_initial( # type: ignore + raw_result = self._start_initial( resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, @@ 
-1400,6 +1401,7 @@ def begin_start( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements @@ -1421,9 +1423,9 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- ) return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore - def _stop_initial( # pylint: disable=inconsistent-return-statements + def _stop_initial( self, resource_group_name: str, factory_name: str, trigger_name: str, **kwargs: Any - ) -> None: + ) -> Iterator[bytes]: error_map: MutableMapping[int, Type[HttpResponseError]] = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1436,7 +1438,7 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[None] = kwargs.pop("cls", None) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) _request = build_stop_request( resource_group_name=resource_group_name, @@ -1447,10 +1449,9 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements headers=_headers, params=_params, ) - _request = _convert_request(_request) _request.url = self._client.format_url(_request.url) - _stream = False + _stream = True pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1458,11 +1459,16 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements response = pipeline_response.http_response if response.status_code not in [200]: + response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) + deserialized = response.stream_download(self._client._pipeline) + if cls: - return cls(pipeline_response, None, {}) # type: ignore + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore @distributed_trace def begin_stop( @@ -1489,7 +1495,7 @@ def begin_stop( lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._stop_initial( # type: ignore + raw_result = self._stop_initial( resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, @@ -1499,6 +1505,7 @@ def begin_stop( params=_params, **kwargs ) + raw_result.http_response.read() # type: ignore kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/conftest.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/conftest.py new file mode 100644 index 0000000000000..6d9707a4e6e20 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/conftest.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import os
+import pytest
+from dotenv import load_dotenv
+from devtools_testutils import (
+    test_proxy,
+    add_general_regex_sanitizer,
+    add_body_key_sanitizer,
+    add_header_regex_sanitizer,
+)
+
+load_dotenv()
+
+
+# avoid recording sensitive identity information in recordings
+@pytest.fixture(scope="session", autouse=True)
+def add_sanitizers(test_proxy):
+    datafactorymanagement_subscription_id = os.environ.get(
+        "AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000"
+    )
+    datafactorymanagement_tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000")
+    datafactorymanagement_client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000")
+    datafactorymanagement_client_secret = os.environ.get("AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(
+        regex=datafactorymanagement_subscription_id, value="00000000-0000-0000-0000-000000000000"
+    )
+    add_general_regex_sanitizer(regex=datafactorymanagement_tenant_id, value="00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(regex=datafactorymanagement_client_id, value="00000000-0000-0000-0000-000000000000")
+    add_general_regex_sanitizer(regex=datafactorymanagement_client_secret, value="00000000-0000-0000-0000-000000000000")
+
+    add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]")
+    add_header_regex_sanitizer(key="Cookie", value="cookie;")
+    add_body_key_sanitizer(json_path="$..access_token", value="access_token")
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_activity_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_activity_runs_operations.py
new file mode 100644
index 0000000000000..30425facce342
--- /dev/null
+++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_activity_runs_operations.py
@@ -0,0 +1,39 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementActivityRunsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_query_by_pipeline_run(self, resource_group): + response = self.client.activity_runs.query_by_pipeline_run( + resource_group_name=resource_group.name, + factory_name="str", + run_id="str", + filter_parameters={ + "lastUpdatedAfter": "2020-02-20 00:00:00", + "lastUpdatedBefore": "2020-02-20 00:00:00", + "continuationToken": "str", + "filters": [{"operand": "str", "operator": "str", "values": ["str"]}], + "orderBy": [{"order": "str", "orderBy": "str"}], + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_activity_runs_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_activity_runs_operations_async.py new file mode 100644 index 0000000000000..26a8455f02dee --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_activity_runs_operations_async.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementActivityRunsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_query_by_pipeline_run(self, resource_group): + response = await self.client.activity_runs.query_by_pipeline_run( + resource_group_name=resource_group.name, + factory_name="str", + run_id="str", + filter_parameters={ + "lastUpdatedAfter": "2020-02-20 00:00:00", + "lastUpdatedBefore": "2020-02-20 00:00:00", + "continuationToken": "str", + "filters": [{"operand": "str", "operator": "str", "values": ["str"]}], + "orderBy": [{"order": "str", "orderBy": "str"}], + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
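Note on the regenerated trigger operations above: each `_*_initial` helper now issues its request with `_stream = True` and returns `Iterator[bytes]` instead of a deserialized model (or `None`), and the `begin_*` wrappers call `raw_result.http_response.read()` before polling starts. Callers are unaffected: the poller's `get_long_running_output` still deserializes the final `TriggerSubscriptionOperationStatus`. A minimal usage sketch, with every angle-bracket value a hypothetical placeholder rather than anything from this PR:

from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient

# Placeholder subscription and resource names for illustration only.
client = DataFactoryManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
)

# The initial request is streamed internally; LROPoller hides that detail
# and still returns a TriggerSubscriptionOperationStatus on completion.
poller = client.triggers.begin_subscribe_to_events(
    resource_group_name="<resource-group>",
    factory_name="<factory-name>",
    trigger_name="<trigger-name>",
)
status = poller.result()
print(status.trigger_name, status.status)

The same pattern applies to `begin_unsubscribe_from_events`, `begin_start`, and `begin_stop` in this file.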
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_change_data_capture_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_change_data_capture_operations.py new file mode 100644 index 0000000000000..edee8c213e4d7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_change_data_capture_operations.py @@ -0,0 +1,190 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementChangeDataCaptureOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_factory(self, resource_group): + response = self.client.change_data_capture.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.change_data_capture.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + change_data_capture_name="str", + change_data_capture={ + "policy": {"mode": "str", "recurrence": {"frequency": "str", "interval": 0}}, + "sourceConnectionsInfo": [ + { + "connection": { + "type": "str", + "commonDslConnectorProperties": [{"name": "str", "value": {}}], + "isInlineDataset": bool, + "linkedService": {"referenceName": "str", "type": "str", "parameters": {"str": {}}}, + "linkedServiceType": "str", + }, + "sourceEntities": [ + { + "dslConnectorProperties": [{"name": "str", "value": {}}], + "name": "str", + "schema": [{"dataType": "str", "name": "str"}], + } + ], + } + ], + "targetConnectionsInfo": [ + { + "connection": { + "type": "str", + "commonDslConnectorProperties": [{"name": "str", "value": {}}], + "isInlineDataset": bool, + "linkedService": {"referenceName": "str", "type": "str", "parameters": {"str": {}}}, + "linkedServiceType": "str", + }, + "dataMapperMappings": [ + { + "attributeMappingInfo": { + "attributeMappings": [ + { + "attributeReference": { + "entity": "str", + "entityConnectionReference": {"connectionName": "str", "type": "str"}, + "name": "str", + }, + "attributeReferences": [ + { + "entity": "str", + "entityConnectionReference": { + "connectionName": "str", + "type": "str", + }, + "name": "str", + } + ], + "expression": "str", + "functionName": "str", + "name": "str", + "type": "str", + } + ] + }, + "sourceConnectionReference": {"connectionName": "str", "type": "str"}, + "sourceDenormalizeInfo": {}, + "sourceEntityName": "str", + "targetEntityName": "str", + } + ], + "relationships": [{}], + "targetEntities": [ + { + "dslConnectorProperties": [{"name": "str", "value": {}}], + "name": "str", + "schema": [{"dataType": "str", "name": "str"}], + } + ], + } + ], + "allowVNetOverride": bool, + "description": "str", + "etag": "str", + "folder": {"name": "str"}, + "id": "str", + "name": "str", + "status": "str", + "type": "str", + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.change_data_capture.get( + resource_group_name=resource_group.name, + factory_name="str", + change_data_capture_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.change_data_capture.delete( + resource_group_name=resource_group.name, + factory_name="str", + change_data_capture_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_start(self, resource_group): + response = self.client.change_data_capture.start( + resource_group_name=resource_group.name, + factory_name="str", + change_data_capture_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_stop(self, resource_group): + response = self.client.change_data_capture.stop( + resource_group_name=resource_group.name, + factory_name="str", + change_data_capture_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_status(self, resource_group): + response = self.client.change_data_capture.status( + resource_group_name=resource_group.name, + factory_name="str", + change_data_capture_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_change_data_capture_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_change_data_capture_operations_async.py new file mode 100644 index 0000000000000..325a9f0e1cf96 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_change_data_capture_operations_async.py @@ -0,0 +1,191 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementChangeDataCaptureOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_factory(self, resource_group): + response = self.client.change_data_capture.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.change_data_capture.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + change_data_capture_name="str", + change_data_capture={ + "policy": {"mode": "str", "recurrence": {"frequency": "str", "interval": 0}}, + "sourceConnectionsInfo": [ + { + "connection": { + "type": "str", + "commonDslConnectorProperties": [{"name": "str", "value": {}}], + "isInlineDataset": bool, + "linkedService": {"referenceName": "str", "type": "str", "parameters": {"str": {}}}, + "linkedServiceType": "str", + }, + "sourceEntities": [ + { + "dslConnectorProperties": [{"name": "str", "value": {}}], + "name": "str", + "schema": [{"dataType": "str", "name": "str"}], + } + ], + } + ], + "targetConnectionsInfo": [ + { + "connection": { + "type": "str", + "commonDslConnectorProperties": [{"name": "str", "value": {}}], + "isInlineDataset": bool, + "linkedService": {"referenceName": "str", "type": "str", "parameters": {"str": {}}}, + "linkedServiceType": "str", + }, + "dataMapperMappings": [ + { + "attributeMappingInfo": { + "attributeMappings": [ + { + "attributeReference": { + "entity": "str", + "entityConnectionReference": {"connectionName": "str", "type": "str"}, + "name": "str", + }, + "attributeReferences": [ + { + "entity": "str", + "entityConnectionReference": { + "connectionName": "str", + "type": "str", + }, + "name": "str", + } + ], + "expression": "str", + "functionName": "str", + "name": "str", + "type": "str", + } + ] + }, + "sourceConnectionReference": {"connectionName": "str", "type": "str"}, + "sourceDenormalizeInfo": {}, + "sourceEntityName": "str", + "targetEntityName": "str", + } + ], + "relationships": [{}], + "targetEntities": [ + { + "dslConnectorProperties": [{"name": "str", "value": {}}], + "name": "str", + "schema": [{"dataType": "str", "name": "str"}], + } + ], + } + ], + "allowVNetOverride": bool, + "description": "str", + "etag": "str", + "folder": {"name": "str"}, + "id": "str", + "name": "str", + "status": "str", + "type": "str", + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.change_data_capture.get( + resource_group_name=resource_group.name, + factory_name="str", + change_data_capture_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.change_data_capture.delete( + resource_group_name=resource_group.name, + factory_name="str", + change_data_capture_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_start(self, resource_group): + response = await self.client.change_data_capture.start( + resource_group_name=resource_group.name, + factory_name="str", + change_data_capture_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_stop(self, resource_group): + response = await self.client.change_data_capture.stop( + resource_group_name=resource_group.name, + factory_name="str", + change_data_capture_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_status(self, resource_group): + response = await self.client.change_data_capture.status( + resource_group_name=resource_group.name, + factory_name="str", + change_data_capture_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_credential_operations_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_credential_operations_operations.py new file mode 100644 index 0000000000000..aa12eb5bb295b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_credential_operations_operations.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementCredentialOperationsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_factory(self, resource_group): + response = self.client.credential_operations.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.credential_operations.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + credential_name="str", + credential={"properties": "credential", "etag": "str", "id": "str", "name": "str", "type": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.credential_operations.get( + resource_group_name=resource_group.name, + factory_name="str", + credential_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.credential_operations.delete( + resource_group_name=resource_group.name, + factory_name="str", + credential_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_credential_operations_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_credential_operations_operations_async.py new file mode 100644 index 0000000000000..bc363029eb586 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_credential_operations_operations_async.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementCredentialOperationsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_factory(self, resource_group): + response = self.client.credential_operations.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.credential_operations.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + credential_name="str", + credential={"properties": "credential", "etag": "str", "id": "str", "name": "str", "type": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.credential_operations.get( + resource_group_name=resource_group.name, + factory_name="str", + credential_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.credential_operations.delete( + resource_group_name=resource_group.name, + factory_name="str", + credential_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
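All of the generated recorded tests in this PR rely on the `conftest.py` fixture above, which registers test-proxy sanitizers so that real subscription, tenant, and client identifiers never land in recordings. The pattern extends naturally if a project needs to scrub more values; a sketch under the assumption that a real factory name, read from a hypothetical DATAFACTORY_NAME variable, should also be masked:

import os
import pytest
from devtools_testutils import add_general_regex_sanitizer

# Hypothetical extra fixture, mirroring the generated add_sanitizers above;
# DATAFACTORY_NAME is an assumed variable, not part of this PR.
@pytest.fixture(scope="session", autouse=True)
def add_factory_name_sanitizer(test_proxy):
    factory_name = os.environ.get("DATAFACTORY_NAME", "fakefactory")
    # Replace the real factory name with a stable placeholder in recordings.
    add_general_regex_sanitizer(regex=factory_name, value="fakefactory")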
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flow_debug_session_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flow_debug_session_operations.py new file mode 100644 index 0000000000000..009a428155ed7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flow_debug_session_operations.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementDataFlowDebugSessionOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_create(self, resource_group): + response = self.client.data_flow_debug_session.begin_create( + resource_group_name=resource_group.name, + factory_name="str", + request={ + "computeType": "str", + "coreCount": 0, + "integrationRuntime": {"properties": "integration_runtime", "name": "str"}, + "timeToLive": 0, + }, + api_version="2018-06-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_query_by_factory(self, resource_group): + response = self.client.data_flow_debug_session.query_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_add_data_flow(self, resource_group): + response = self.client.data_flow_debug_session.add_data_flow( + resource_group_name=resource_group.name, + factory_name="str", + request={ + "dataFlow": {"properties": "data_flow", "name": "str"}, + "dataFlows": [{"properties": "data_flow", "name": "str"}], + "datasets": [{"properties": "dataset", "name": "str"}], + "debugSettings": { + "datasetParameters": {}, + "parameters": {"str": {}}, + "sourceSettings": [{"rowLimit": 0, "sourceName": "str"}], + }, + "linkedServices": [{"properties": "linked_service", "name": "str"}], + "sessionId": "str", + "staging": { + "folderPath": {}, + "linkedService": {"referenceName": "str", "type": "str", "parameters": {"str": {}}}, + }, + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.data_flow_debug_session.delete( + resource_group_name=resource_group.name, + factory_name="str", + request={"sessionId": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_execute_command(self, resource_group): + response = self.client.data_flow_debug_session.begin_execute_command( + resource_group_name=resource_group.name, + factory_name="str", + request={ + "command": "str", + "commandPayload": {"streamName": "str", "columns": ["str"], "expression": "str", "rowLimits": 0}, + "sessionId": "str", + }, + api_version="2018-06-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flow_debug_session_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flow_debug_session_operations_async.py new file mode 100644 index 0000000000000..c8fc8f2c97803 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flow_debug_session_operations_async.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementDataFlowDebugSessionOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_create(self, resource_group): + response = await ( + await self.client.data_flow_debug_session.begin_create( + resource_group_name=resource_group.name, + factory_name="str", + request={ + "computeType": "str", + "coreCount": 0, + "integrationRuntime": {"properties": "integration_runtime", "name": "str"}, + "timeToLive": 0, + }, + api_version="2018-06-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_query_by_factory(self, resource_group): + response = self.client.data_flow_debug_session.query_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_add_data_flow(self, resource_group): + response = await self.client.data_flow_debug_session.add_data_flow( + resource_group_name=resource_group.name, + factory_name="str", + request={ + "dataFlow": {"properties": "data_flow", "name": "str"}, + "dataFlows": [{"properties": "data_flow", "name": "str"}], + "datasets": [{"properties": "dataset", "name": "str"}], + "debugSettings": { + "datasetParameters": {}, + "parameters": {"str": {}}, + "sourceSettings": [{"rowLimit": 0, "sourceName": "str"}], + }, + "linkedServices": [{"properties": "linked_service", "name": "str"}], + "sessionId": "str", + "staging": { + "folderPath": {}, + "linkedService": {"referenceName": "str", "type": "str", "parameters": {"str": {}}}, + }, + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.data_flow_debug_session.delete( + resource_group_name=resource_group.name, + factory_name="str", + request={"sessionId": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_execute_command(self, resource_group): + response = await ( + await self.client.data_flow_debug_session.begin_execute_command( + resource_group_name=resource_group.name, + factory_name="str", + request={ + "command": "str", + "commandPayload": {"streamName": "str", "columns": ["str"], "expression": "str", "rowLimits": 0}, + "sessionId": "str", + }, + api_version="2018-06-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flows_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flows_operations.py new file mode 100644 index 0000000000000..3db90f62abd43 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flows_operations.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementDataFlowsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.data_flows.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + data_flow_name="str", + data_flow={"properties": "data_flow", "etag": "str", "id": "str", "name": "str", "type": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.data_flows.get( + resource_group_name=resource_group.name, + factory_name="str", + data_flow_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.data_flows.delete( + resource_group_name=resource_group.name, + factory_name="str", + data_flow_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_factory(self, resource_group): + response = self.client.data_flows.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flows_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flows_operations_async.py new file mode 100644 index 0000000000000..41513ff09d7bb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_data_flows_operations_async.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementDataFlowsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.data_flows.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + data_flow_name="str", + data_flow={"properties": "data_flow", "etag": "str", "id": "str", "name": "str", "type": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.data_flows.get( + resource_group_name=resource_group.name, + factory_name="str", + data_flow_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.data_flows.delete( + resource_group_name=resource_group.name, + factory_name="str", + data_flow_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_factory(self, resource_group): + response = self.client.data_flows.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_datasets_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_datasets_operations.py new file mode 100644 index 0000000000000..14348641640bd --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_datasets_operations.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementDatasetsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_factory(self, resource_group): + response = self.client.datasets.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.datasets.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + dataset_name="str", + dataset={"properties": "dataset", "etag": "str", "id": "str", "name": "str", "type": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.datasets.get( + resource_group_name=resource_group.name, + factory_name="str", + dataset_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.datasets.delete( + resource_group_name=resource_group.name, + factory_name="str", + dataset_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_datasets_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_datasets_operations_async.py new file mode 100644 index 0000000000000..97836ed4ceb58 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_datasets_operations_async.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementDatasetsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_factory(self, resource_group): + response = self.client.datasets.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.datasets.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + dataset_name="str", + dataset={"properties": "dataset", "etag": "str", "id": "str", "name": "str", "type": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.datasets.get( + resource_group_name=resource_group.name, + factory_name="str", + dataset_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.datasets.delete( + resource_group_name=resource_group.name, + factory_name="str", + dataset_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_exposure_control_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_exposure_control_operations.py new file mode 100644 index 0000000000000..246081c8f53ff --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_exposure_control_operations.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementExposureControlOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_feature_value(self, resource_group): + response = self.client.exposure_control.get_feature_value( + location_id="str", + exposure_control_request={"featureName": "str", "featureType": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_feature_value_by_factory(self, resource_group): + response = self.client.exposure_control.get_feature_value_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + exposure_control_request={"featureName": "str", "featureType": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_query_feature_values_by_factory(self, resource_group): + response = self.client.exposure_control.query_feature_values_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + exposure_control_batch_request={"exposureControlRequests": [{"featureName": "str", "featureType": "str"}]}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_exposure_control_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_exposure_control_operations_async.py new file mode 100644 index 0000000000000..a0ceb77d02033 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_exposure_control_operations_async.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementExposureControlOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_feature_value(self, resource_group): + response = await self.client.exposure_control.get_feature_value( + location_id="str", + exposure_control_request={"featureName": "str", "featureType": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_feature_value_by_factory(self, resource_group): + response = await self.client.exposure_control.get_feature_value_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + exposure_control_request={"featureName": "str", "featureType": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_query_feature_values_by_factory(self, resource_group): + response = await self.client.exposure_control.query_feature_values_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + exposure_control_batch_request={"exposureControlRequests": [{"featureName": "str", "featureType": "str"}]}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_factories_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_factories_operations.py new file mode 100644 index 0000000000000..94dbeb5cf9fb9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_factories_operations.py @@ -0,0 +1,174 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementFactoriesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.factories.list( + api_version="2018-06-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_configure_factory_repo(self, resource_group): + response = self.client.factories.configure_factory_repo( + location_id="str", + factory_repo_update={"factoryResourceId": "str", "repoConfiguration": "factory_repo_configuration"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_resource_group(self, resource_group): + response = self.client.factories.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2018-06-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.factories.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + factory={ + "createTime": "2020-02-20 00:00:00", + "eTag": "str", + "encryption": { + "keyName": "str", + "vaultBaseUrl": "str", + "identity": {"userAssignedIdentity": "str"}, + "keyVersion": "str", + }, + "globalParameters": {"str": {"type": "str", "value": {}}}, + "id": "str", + "identity": { + "type": "str", + "principalId": "str", + "tenantId": "str", + "userAssignedIdentities": {"str": {}}, + }, + "location": "str", + "name": "str", + "provisioningState": "str", + "publicNetworkAccess": "str", + "purviewConfiguration": {"purviewResourceId": "str"}, + "repoConfiguration": "factory_repo_configuration", + "tags": {"str": "str"}, + "type": "str", + "version": "str", + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_update(self, resource_group): + response = self.client.factories.update( + resource_group_name=resource_group.name, + factory_name="str", + factory_update_parameters={ + "identity": { + "type": "str", + "principalId": "str", + "tenantId": "str", + "userAssignedIdentities": {"str": {}}, + }, + "publicNetworkAccess": "str", + "tags": {"str": "str"}, + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.factories.get( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.factories.delete( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_git_hub_access_token(self, resource_group): + response = self.client.factories.get_git_hub_access_token( + resource_group_name=resource_group.name, + factory_name="str", + git_hub_access_token_request={ + "gitHubAccessCode": "str", + "gitHubAccessTokenBaseUrl": "str", + "gitHubClientId": "str", + "gitHubClientSecret": {"byoaSecretAkvUrl": "str", "byoaSecretName": "str"}, + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_data_plane_access(self, resource_group): + response = self.client.factories.get_data_plane_access( + resource_group_name=resource_group.name, + factory_name="str", + policy={ + "accessResourcePath": "str", + "expireTime": "str", + "permissions": "str", + "profileName": "str", + "startTime": "str", + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_factories_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_factories_operations_async.py new file mode 100644 index 0000000000000..fd08741d55f96 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_factories_operations_async.py @@ -0,0 +1,175 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementFactoriesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.factories.list( + api_version="2018-06-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_configure_factory_repo(self, resource_group): + response = await self.client.factories.configure_factory_repo( + location_id="str", + factory_repo_update={"factoryResourceId": "str", "repoConfiguration": "factory_repo_configuration"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_resource_group(self, resource_group): + response = self.client.factories.list_by_resource_group( + resource_group_name=resource_group.name, + api_version="2018-06-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.factories.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + factory={ + "createTime": "2020-02-20 00:00:00", + "eTag": "str", + "encryption": { + "keyName": "str", + "vaultBaseUrl": "str", + "identity": {"userAssignedIdentity": "str"}, + "keyVersion": "str", + }, + "globalParameters": {"str": {"type": "str", "value": {}}}, + "id": "str", + "identity": { + "type": "str", + "principalId": "str", + "tenantId": "str", + "userAssignedIdentities": {"str": {}}, + }, + "location": "str", + "name": "str", + "provisioningState": "str", + "publicNetworkAccess": "str", + "purviewConfiguration": {"purviewResourceId": "str"}, + "repoConfiguration": "factory_repo_configuration", + "tags": {"str": "str"}, + "type": "str", + "version": "str", + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_update(self, resource_group): + response = await self.client.factories.update( + resource_group_name=resource_group.name, + factory_name="str", + factory_update_parameters={ + "identity": { + "type": "str", + "principalId": "str", + "tenantId": "str", + "userAssignedIdentities": {"str": {}}, + }, + "publicNetworkAccess": "str", + "tags": {"str": "str"}, + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.factories.get( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.factories.delete( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_git_hub_access_token(self, resource_group): + response = await self.client.factories.get_git_hub_access_token( + resource_group_name=resource_group.name, + factory_name="str", + git_hub_access_token_request={ + "gitHubAccessCode": "str", + "gitHubAccessTokenBaseUrl": "str", + "gitHubClientId": "str", + "gitHubClientSecret": {"byoaSecretAkvUrl": "str", "byoaSecretName": "str"}, + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_data_plane_access(self, resource_group): + response = await self.client.factories.get_data_plane_access( + resource_group_name=resource_group.name, + factory_name="str", + policy={ + "accessResourcePath": "str", + "expireTime": "str", + "permissions": "str", + "profileName": "str", + "startTime": "str", + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_global_parameters_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_global_parameters_operations.py new file mode 100644 index 0000000000000..55324ca068c38 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_global_parameters_operations.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementGlobalParametersOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_factory(self, resource_group): + response = self.client.global_parameters.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.global_parameters.get( + resource_group_name=resource_group.name, + factory_name="str", + global_parameter_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.global_parameters.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + global_parameter_name="str", + default={ + "properties": {"str": {"type": "str", "value": {}}}, + "etag": "str", + "id": "str", + "name": "str", + "type": "str", + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.global_parameters.delete( + resource_group_name=resource_group.name, + factory_name="str", + global_parameter_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_global_parameters_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_global_parameters_operations_async.py new file mode 100644 index 0000000000000..704e5a26b0cd4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_global_parameters_operations_async.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementGlobalParametersOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_factory(self, resource_group): + response = self.client.global_parameters.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.global_parameters.get( + resource_group_name=resource_group.name, + factory_name="str", + global_parameter_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.global_parameters.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + global_parameter_name="str", + default={ + "properties": {"str": {"type": "str", "value": {}}}, + "etag": "str", + "id": "str", + "name": "str", + "type": "str", + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.global_parameters.delete( + resource_group_name=resource_group.name, + factory_name="str", + global_parameter_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_nodes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_nodes_operations.py new file mode 100644 index 0000000000000..f8966ee56c401 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_nodes_operations.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementIntegrationRuntimeNodesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.integration_runtime_nodes.get( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + node_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.integration_runtime_nodes.delete( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + node_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_update(self, resource_group): + response = self.client.integration_runtime_nodes.update( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + node_name="str", + update_integration_runtime_node_request={"concurrentJobsLimit": 0}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_ip_address(self, resource_group): + response = self.client.integration_runtime_nodes.get_ip_address( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + node_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_nodes_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_nodes_operations_async.py new file mode 100644 index 0000000000000..52b99df852d98 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_nodes_operations_async.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementIntegrationRuntimeNodesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.integration_runtime_nodes.get( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + node_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.integration_runtime_nodes.delete( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + node_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_update(self, resource_group): + response = await self.client.integration_runtime_nodes.update( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + node_name="str", + update_integration_runtime_node_request={"concurrentJobsLimit": 0}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_ip_address(self, resource_group): + response = await self.client.integration_runtime_nodes.get_ip_address( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + node_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_object_metadata_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_object_metadata_operations.py new file mode 100644 index 0000000000000..8b3f0ea1ecf68 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_object_metadata_operations.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementIntegrationRuntimeObjectMetadataOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_refresh(self, resource_group): + response = self.client.integration_runtime_object_metadata.begin_refresh( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.integration_runtime_object_metadata.get( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
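The object-metadata tests above show the sync long-running-operation pattern: `begin_refresh(...)` returns an `LROPoller`, and `.result()` blocks until the service reports a terminal state (the async twin that follows needs two awaits — one for `begin_refresh` to obtain the `AsyncLROPoller`, one for `.result()`). As a minimal sketch of the kind of "check logic" the generated placeholders ask for: the factory and integration runtime names below are hypothetical, and the `status` attribute and its values are assumptions about the 2018-06-01 `SsisObjectMetadataStatusResponse` shape, not part of the generated file.

    poller = self.client.integration_runtime_object_metadata.begin_refresh(
        resource_group_name=resource_group.name,
        factory_name="myFactory",            # hypothetical name replacing the "str" placeholder
        integration_runtime_name="mySsisIr", # hypothetical provisioned Azure-SSIS runtime
        api_version="2018-06-01",
    )
    response = poller.result()  # polls until the service returns the final result

    # Assumed response shape: a status string plus optional error details.
    assert response is not None
    assert response.status in ("Succeeded", "InProgress", "Failed", "Canceled")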
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_object_metadata_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_object_metadata_operations_async.py new file mode 100644 index 0000000000000..1d095b566c430 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtime_object_metadata_operations_async.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementIntegrationRuntimeObjectMetadataOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_refresh(self, resource_group): + response = await ( + await self.client.integration_runtime_object_metadata.begin_refresh( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.integration_runtime_object_metadata.get( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtimes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtimes_operations.py new file mode 100644 index 0000000000000..7816caf1974ba --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtimes_operations.py @@ -0,0 +1,255 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementIntegrationRuntimesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_factory(self, resource_group): + response = self.client.integration_runtimes.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.integration_runtimes.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + integration_runtime={ + "properties": "integration_runtime", + "etag": "str", + "id": "str", + "name": "str", + "type": "str", + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.integration_runtimes.get( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_update(self, resource_group): + response = self.client.integration_runtimes.update( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + update_integration_runtime_request={"autoUpdate": "str", "updateDelayOffset": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.integration_runtimes.delete( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_status(self, resource_group): + response = self.client.integration_runtimes.get_status( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_outbound_network_dependencies_endpoints(self, resource_group): + response = self.client.integration_runtimes.list_outbound_network_dependencies_endpoints( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_connection_info(self, resource_group): + response = self.client.integration_runtimes.get_connection_info( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_regenerate_auth_key(self, resource_group): + response = self.client.integration_runtimes.regenerate_auth_key( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + regenerate_key_parameters={"keyName": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_auth_keys(self, resource_group): + response = self.client.integration_runtimes.list_auth_keys( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_start(self, resource_group): + response = self.client.integration_runtimes.begin_start( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_stop(self, resource_group): + response = self.client.integration_runtimes.begin_stop( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_sync_credentials(self, resource_group): + response = self.client.integration_runtimes.sync_credentials( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_monitoring_data(self, resource_group): + response = self.client.integration_runtimes.get_monitoring_data( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_upgrade(self, resource_group): + response = self.client.integration_runtimes.upgrade( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_remove_links(self, resource_group): + response = self.client.integration_runtimes.remove_links( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + linked_integration_runtime_request={"factoryName": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_linked_integration_runtime(self, resource_group): + response = self.client.integration_runtimes.create_linked_integration_runtime( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + create_linked_integration_runtime_request={ + "dataFactoryLocation": "str", + "dataFactoryName": "str", + "name": "str", + "subscriptionId": "str", + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtimes_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtimes_operations_async.py new file mode 100644 index 0000000000000..50675e5348608 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_integration_runtimes_operations_async.py @@ -0,0 +1,260 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementIntegrationRuntimesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_factory(self, resource_group): + response = self.client.integration_runtimes.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.integration_runtimes.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + integration_runtime={ + "properties": "integration_runtime", + "etag": "str", + "id": "str", + "name": "str", + "type": "str", + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.integration_runtimes.get( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_update(self, resource_group): + response = await self.client.integration_runtimes.update( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + update_integration_runtime_request={"autoUpdate": "str", "updateDelayOffset": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.integration_runtimes.delete( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_status(self, resource_group): + response = await self.client.integration_runtimes.get_status( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_outbound_network_dependencies_endpoints(self, resource_group): + response = await self.client.integration_runtimes.list_outbound_network_dependencies_endpoints( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_connection_info(self, resource_group): + response = await self.client.integration_runtimes.get_connection_info( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_regenerate_auth_key(self, resource_group): + response = await self.client.integration_runtimes.regenerate_auth_key( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + regenerate_key_parameters={"keyName": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_auth_keys(self, resource_group): + response = await self.client.integration_runtimes.list_auth_keys( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_start(self, resource_group): + response = await ( + await self.client.integration_runtimes.begin_start( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_stop(self, resource_group): + response = await ( + await self.client.integration_runtimes.begin_stop( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_sync_credentials(self, resource_group): + response = await self.client.integration_runtimes.sync_credentials( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_monitoring_data(self, resource_group): + response = await self.client.integration_runtimes.get_monitoring_data( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_upgrade(self, resource_group): + response = await self.client.integration_runtimes.upgrade( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_remove_links(self, resource_group): + response = await self.client.integration_runtimes.remove_links( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + linked_integration_runtime_request={"factoryName": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_linked_integration_runtime(self, resource_group): + response = await self.client.integration_runtimes.create_linked_integration_runtime( + resource_group_name=resource_group.name, + factory_name="str", + integration_runtime_name="str", + create_linked_integration_runtime_request={ + "dataFactoryLocation": "str", + "dataFactoryName": "str", + "name": "str", + "subscriptionId": "str", + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
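Throughout these generated bodies, `"str"`, `0`, `{}`, and the bare builtin `bool` are type placeholders emitted by the generator, not working values; they must be replaced with real payloads before a test can be recorded. A hedged sketch for the integration-runtime `update` call above — the resource names are hypothetical, and the `autoUpdate`/`updateDelayOffset` values are assumptions based on the service's 2018-06-01 REST examples:

    response = await self.client.integration_runtimes.update(
        resource_group_name=resource_group.name,
        factory_name="myFactory",              # hypothetical names replacing the "str" placeholders
        integration_runtime_name="mySelfHostedIr",
        update_integration_runtime_request={
            "autoUpdate": "Off",               # assumed enum value: "On" or "Off"
            "updateDelayOffset": '"PT3H"',     # assumed JSON-serialized timespan, per REST examples
        },
        api_version="2018-06-01",
    )
    assert response.name == "mySelfHostedIr"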
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_linked_services_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_linked_services_operations.py new file mode 100644 index 0000000000000..860fca87ee4dc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_linked_services_operations.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementLinkedServicesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_factory(self, resource_group): + response = self.client.linked_services.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.linked_services.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + linked_service_name="str", + linked_service={"properties": "linked_service", "etag": "str", "id": "str", "name": "str", "type": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.linked_services.get( + resource_group_name=resource_group.name, + factory_name="str", + linked_service_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.linked_services.delete( + resource_group_name=resource_group.name, + factory_name="str", + linked_service_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
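In the `create_or_update` test above, `"properties": "linked_service"` stands in for a polymorphic `LinkedService` body selected by its `type` discriminator. One concrete shape, assuming an Azure Blob Storage linked service roughly as in the service's REST examples (the names are hypothetical and the connection string is a dummy):

    response = self.client.linked_services.create_or_update(
        resource_group_name=resource_group.name,
        factory_name="myFactory",        # hypothetical factory name
        linked_service_name="myBlobLs",  # hypothetical linked service name
        linked_service={
            "properties": {
                "type": "AzureBlobStorage",  # discriminator choosing the concrete LinkedService type
                "typeProperties": {
                    "connectionString": "DefaultEndpointsProtocol=https;AccountName=<account>;AccountKey=<key>"
                },
            }
        },
        api_version="2018-06-01",
    )
    assert response.properties.type == "AzureBlobStorage"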
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_linked_services_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_linked_services_operations_async.py new file mode 100644 index 0000000000000..4e76602593684 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_linked_services_operations_async.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementLinkedServicesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_factory(self, resource_group): + response = self.client.linked_services.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.linked_services.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + linked_service_name="str", + linked_service={"properties": "linked_service", "etag": "str", "id": "str", "name": "str", "type": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.linked_services.get( + resource_group_name=resource_group.name, + factory_name="str", + linked_service_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.linked_services.delete( + resource_group_name=resource_group.name, + factory_name="str", + linked_service_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
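The `delete` operations in these tests return `None` on success, so there is nothing on the response itself to assert; a natural piece of "check logic" is to verify the resource is actually gone afterwards. A sketch for the async delete above, using `ResourceNotFoundError` from `azure.core.exceptions` (the names are hypothetical):

    from azure.core.exceptions import ResourceNotFoundError

    await self.client.linked_services.delete(
        resource_group_name=resource_group.name,
        factory_name="myFactory",        # hypothetical names replacing the "str" placeholders
        linked_service_name="myBlobLs",
        api_version="2018-06-01",
    )
    # A subsequent GET should now fail with 404.
    with pytest.raises(ResourceNotFoundError):
        await self.client.linked_services.get(
            resource_group_name=resource_group.name,
            factory_name="myFactory",
            linked_service_name="myBlobLs",
            api_version="2018-06-01",
        )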
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_private_endpoints_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_private_endpoints_operations.py new file mode 100644 index 0000000000000..14df0f72198bb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_private_endpoints_operations.py @@ -0,0 +1,88 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementManagedPrivateEndpointsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_factory(self, resource_group): + response = self.client.managed_private_endpoints.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + managed_virtual_network_name="str", + api_version="2018-06-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.managed_private_endpoints.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + managed_virtual_network_name="str", + managed_private_endpoint_name="str", + managed_private_endpoint={ + "properties": { + "connectionState": {"actionsRequired": "str", "description": "str", "status": "str"}, + "fqdns": ["str"], + "groupId": "str", + "isReserved": bool, + "privateLinkResourceId": "str", + "provisioningState": "str", + }, + "etag": "str", + "id": "str", + "name": "str", + "type": "str", + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.managed_private_endpoints.get( + resource_group_name=resource_group.name, + factory_name="str", + managed_virtual_network_name="str", + managed_private_endpoint_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.managed_private_endpoints.delete( + resource_group_name=resource_group.name, + factory_name="str", + managed_virtual_network_name="str", + managed_private_endpoint_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
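In the managed-private-endpoint `create_or_update` body above, fields such as `connectionState`, `isReserved`, and `provisioningState` are populated by the service; a real request would typically send only the target resource, its group id, and the FQDNs. A hedged sketch with hypothetical resource IDs (the managed virtual network name `default` is an assumption, though it is the name Data Factory commonly uses):

    response = self.client.managed_private_endpoints.create_or_update(
        resource_group_name=resource_group.name,
        factory_name="myFactory",
        managed_virtual_network_name="default",
        managed_private_endpoint_name="myStoragePe",
        managed_private_endpoint={
            "properties": {
                # Hypothetical target storage account; replace with a real resource ID.
                "privateLinkResourceId": "/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Storage/storageAccounts/<account>",
                "groupId": "blob",
                "fqdns": ["<account>.blob.core.windows.net"],
            }
        },
        api_version="2018-06-01",
    )
    assert response.properties.group_id == "blob"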
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_private_endpoints_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_private_endpoints_operations_async.py new file mode 100644 index 0000000000000..5fbd619b88710 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_private_endpoints_operations_async.py @@ -0,0 +1,89 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementManagedPrivateEndpointsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_factory(self, resource_group): + response = self.client.managed_private_endpoints.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + managed_virtual_network_name="str", + api_version="2018-06-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.managed_private_endpoints.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + managed_virtual_network_name="str", + managed_private_endpoint_name="str", + managed_private_endpoint={ + "properties": { + "connectionState": {"actionsRequired": "str", "description": "str", "status": "str"}, + "fqdns": ["str"], + "groupId": "str", + "isReserved": bool, + "privateLinkResourceId": "str", + "provisioningState": "str", + }, + "etag": "str", + "id": "str", + "name": "str", + "type": "str", + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.managed_private_endpoints.get( + resource_group_name=resource_group.name, + factory_name="str", + managed_virtual_network_name="str", + managed_private_endpoint_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.managed_private_endpoints.delete( + resource_group_name=resource_group.name, + factory_name="str", + managed_virtual_network_name="str", + managed_private_endpoint_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_virtual_networks_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_virtual_networks_operations.py new file mode 100644 index 0000000000000..fb8000b8e9853 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_virtual_networks_operations.py @@ -0,0 +1,64 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementManagedVirtualNetworksOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_factory(self, resource_group): + response = self.client.managed_virtual_networks.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.managed_virtual_networks.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + managed_virtual_network_name="str", + managed_virtual_network={ + "properties": {"alias": "str", "vNetId": "str"}, + "etag": "str", + "id": "str", + "name": "str", + "type": "str", + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.managed_virtual_networks.get( + resource_group_name=resource_group.name, + factory_name="str", + managed_virtual_network_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
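Note the paging pattern used throughout these files: `list_by_factory` returns an `ItemPaged` (or `AsyncItemPaged` in the aio tests) immediately, which is why the async tests never await the call itself, only the iteration. A small sketch of check logic for the listing above (the factory name is hypothetical):

    pager = self.client.managed_virtual_networks.list_by_factory(
        resource_group_name=resource_group.name,
        factory_name="myFactory",  # hypothetical factory name
        api_version="2018-06-01",
    )
    result = list(pager)  # materialize all pages
    assert all(vnet.name for vnet in result)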
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_virtual_networks_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_virtual_networks_operations_async.py new file mode 100644 index 0000000000000..73dfcaa3240bf --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_managed_virtual_networks_operations_async.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementManagedVirtualNetworksOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_factory(self, resource_group): + response = self.client.managed_virtual_networks.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.managed_virtual_networks.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + managed_virtual_network_name="str", + managed_virtual_network={ + "properties": {"alias": "str", "vNetId": "str"}, + "etag": "str", + "id": "str", + "name": "str", + "type": "str", + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.managed_virtual_networks.get( + resource_group_name=resource_group.name, + factory_name="str", + managed_virtual_network_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_operations.py new file mode 100644 index 0000000000000..cc44ace480d24 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_operations.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list(self, resource_group): + response = self.client.operations.list( + api_version="2018-06-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_operations_async.py new file mode 100644 index 0000000000000..17590fc1dfcaa --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_operations_async.py @@ -0,0 +1,30 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list(self, resource_group): + response = self.client.operations.list( + api_version="2018-06-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipeline_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipeline_runs_operations.py new file mode 100644 index 0000000000000..5836d281d1f32 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipeline_runs_operations.py @@ -0,0 +1,64 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementPipelineRunsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_query_by_factory(self, resource_group): + response = self.client.pipeline_runs.query_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + filter_parameters={ + "lastUpdatedAfter": "2020-02-20 00:00:00", + "lastUpdatedBefore": "2020-02-20 00:00:00", + "continuationToken": "str", + "filters": [{"operand": "str", "operator": "str", "values": ["str"]}], + "orderBy": [{"order": "str", "orderBy": "str"}], + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.pipeline_runs.get( + resource_group_name=resource_group.name, + factory_name="str", + run_id="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_cancel(self, resource_group): + response = self.client.pipeline_runs.cancel( + resource_group_name=resource_group.name, + factory_name="str", + run_id="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipeline_runs_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipeline_runs_operations_async.py new file mode 100644 index 0000000000000..acd95502267f7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipeline_runs_operations_async.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementPipelineRunsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_query_by_factory(self, resource_group): + response = await self.client.pipeline_runs.query_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + filter_parameters={ + "lastUpdatedAfter": "2020-02-20 00:00:00", + "lastUpdatedBefore": "2020-02-20 00:00:00", + "continuationToken": "str", + "filters": [{"operand": "str", "operator": "str", "values": ["str"]}], + "orderBy": [{"order": "str", "orderBy": "str"}], + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.pipeline_runs.get( + resource_group_name=resource_group.name, + factory_name="str", + run_id="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_cancel(self, resource_group): + response = await self.client.pipeline_runs.cancel( + resource_group_name=resource_group.name, + factory_name="str", + run_id="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipelines_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipelines_operations.py new file mode 100644 index 0000000000000..055cc41ae31eb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipelines_operations.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementPipelinesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_factory(self, resource_group): + response = self.client.pipelines.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.pipelines.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + pipeline_name="str", + pipeline={ + "activities": ["activity"], + "annotations": [{}], + "concurrency": 0, + "description": "str", + "etag": "str", + "folder": {"name": "str"}, + "id": "str", + "name": "str", + "parameters": {"str": {"type": "str", "defaultValue": {}}}, + "policy": {"elapsedTimeMetric": {"duration": {}}}, + "runDimensions": {"str": {}}, + "type": "str", + "variables": {"str": {"type": "str", "defaultValue": {}}}, + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.pipelines.get( + resource_group_name=resource_group.name, + factory_name="str", + pipeline_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.pipelines.delete( + resource_group_name=resource_group.name, + factory_name="str", + pipeline_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_run(self, resource_group): + response = self.client.pipelines.create_run( + resource_group_name=resource_group.name, + factory_name="str", + pipeline_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipelines_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipelines_operations_async.py new file mode 100644 index 0000000000000..2be64cd4e6ab2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_pipelines_operations_async.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementPipelinesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_factory(self, resource_group): + response = self.client.pipelines.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.pipelines.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + pipeline_name="str", + pipeline={ + "activities": ["activity"], + "annotations": [{}], + "concurrency": 0, + "description": "str", + "etag": "str", + "folder": {"name": "str"}, + "id": "str", + "name": "str", + "parameters": {"str": {"type": "str", "defaultValue": {}}}, + "policy": {"elapsedTimeMetric": {"duration": {}}}, + "runDimensions": {"str": {}}, + "type": "str", + "variables": {"str": {"type": "str", "defaultValue": {}}}, + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.pipelines.get( + resource_group_name=resource_group.name, + factory_name="str", + pipeline_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.pipelines.delete( + resource_group_name=resource_group.name, + factory_name="str", + pipeline_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_run(self, resource_group): + response = await self.client.pipelines.create_run( + resource_group_name=resource_group.name, + factory_name="str", + pipeline_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
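The pipelines tests above call create_run but never use the response. In practice create_run returns a CreateRunResponse whose run_id is what the pipeline_runs operations consume, so real check logic usually chains the two. A sketch under the same placeholder-name assumption as above:

```python
import time

from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")

run = client.pipelines.create_run(
    resource_group_name="myResourceGroup",
    factory_name="myFactory",
    pipeline_name="myPipeline",
)

# create_run returns a CreateRunResponse; its run_id identifies the new run.
pipeline_run = client.pipeline_runs.get(
    resource_group_name="myResourceGroup",
    factory_name="myFactory",
    run_id=run.run_id,
)
while pipeline_run.status in ("Queued", "InProgress"):
    time.sleep(15)  # poll until the run reaches a terminal state
    pipeline_run = client.pipeline_runs.get(
        resource_group_name="myResourceGroup",
        factory_name="myFactory",
        run_id=run.run_id,
    )
print(pipeline_run.status)  # e.g. "Succeeded", "Failed", or "Cancelled"
```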
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_end_point_connections_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_end_point_connections_operations.py new file mode 100644 index 0000000000000..9cfadc91770c8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_end_point_connections_operations.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementPrivateEndPointConnectionsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_factory(self, resource_group): + response = self.client.private_end_point_connections.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_end_point_connections_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_end_point_connections_operations_async.py new file mode 100644 index 0000000000000..c152ee8bc1138 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_end_point_connections_operations_async.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementPrivateEndPointConnectionsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_factory(self, resource_group): + response = self.client.private_end_point_connections.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_endpoint_connection_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_endpoint_connection_operations.py new file mode 100644 index 0000000000000..6ac9901e27e24 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_endpoint_connection_operations.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementPrivateEndpointConnectionOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.private_endpoint_connection.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + private_endpoint_connection_name="str", + private_endpoint_wrapper={ + "etag": "str", + "id": "str", + "name": "str", + "properties": { + "privateEndpoint": {"id": "str"}, + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + }, + "type": "str", + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
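+        # Hypothetical check logic (editorial sketch, not part of the generated
+        # file): the call returns a PrivateEndpointConnectionResource; the
+        # expected status value below is an assumption, not taken from a recording.
+        assert response.id is not None
+        assert response.properties.private_link_service_connection_state.status == "Approved"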
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.private_endpoint_connection.get( + resource_group_name=resource_group.name, + factory_name="str", + private_endpoint_connection_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.private_endpoint_connection.delete( + resource_group_name=resource_group.name, + factory_name="str", + private_endpoint_connection_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_endpoint_connection_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_endpoint_connection_operations_async.py new file mode 100644 index 0000000000000..f2cda1711bc26 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_endpoint_connection_operations_async.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementPrivateEndpointConnectionOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.private_endpoint_connection.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + private_endpoint_connection_name="str", + private_endpoint_wrapper={ + "etag": "str", + "id": "str", + "name": "str", + "properties": { + "privateEndpoint": {"id": "str"}, + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + }, + "type": "str", + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.private_endpoint_connection.get( + resource_group_name=resource_group.name, + factory_name="str", + private_endpoint_connection_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.private_endpoint_connection.delete( + resource_group_name=resource_group.name, + factory_name="str", + private_endpoint_connection_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_link_resources_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_link_resources_operations.py new file mode 100644 index 0000000000000..30ec19b28be78 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_link_resources_operations.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementPrivateLinkResourcesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.private_link_resources.get( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_link_resources_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_link_resources_operations_async.py new file mode 100644 index 0000000000000..1625dcd3f94ad --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_private_link_resources_operations_async.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementPrivateLinkResourcesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.private_link_resources.get( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_trigger_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_trigger_runs_operations.py new file mode 100644 index 0000000000000..2f0c2a0fc27fa --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_trigger_runs_operations.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementTriggerRunsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_rerun(self, resource_group): + response = self.client.trigger_runs.rerun( + resource_group_name=resource_group.name, + factory_name="str", + trigger_name="str", + run_id="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_cancel(self, resource_group): + response = self.client.trigger_runs.cancel( + resource_group_name=resource_group.name, + factory_name="str", + trigger_name="str", + run_id="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
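+        # Editorial note: rerun and cancel return no body, so `response` is
+        # None on success; a minimal hypothetical check is simply:
+        assert response is None
+        # Failures surface as HttpResponseError raised by the call itself.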
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_query_by_factory(self, resource_group): + response = self.client.trigger_runs.query_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + filter_parameters={ + "lastUpdatedAfter": "2020-02-20 00:00:00", + "lastUpdatedBefore": "2020-02-20 00:00:00", + "continuationToken": "str", + "filters": [{"operand": "str", "operator": "str", "values": ["str"]}], + "orderBy": [{"order": "str", "orderBy": "str"}], + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_trigger_runs_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_trigger_runs_operations_async.py new file mode 100644 index 0000000000000..cc17d0de01c99 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_trigger_runs_operations_async.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementTriggerRunsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_rerun(self, resource_group): + response = await self.client.trigger_runs.rerun( + resource_group_name=resource_group.name, + factory_name="str", + trigger_name="str", + run_id="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_cancel(self, resource_group): + response = await self.client.trigger_runs.cancel( + resource_group_name=resource_group.name, + factory_name="str", + trigger_name="str", + run_id="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_query_by_factory(self, resource_group): + response = await self.client.trigger_runs.query_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + filter_parameters={ + "lastUpdatedAfter": "2020-02-20 00:00:00", + "lastUpdatedBefore": "2020-02-20 00:00:00", + "continuationToken": "str", + "filters": [{"operand": "str", "operator": "str", "values": ["str"]}], + "orderBy": [{"order": "str", "orderBy": "str"}], + }, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
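In the query_by_factory calls above, the generated dicts pass lastUpdatedAfter and lastUpdatedBefore as literal strings; the service expects ISO-8601 timestamps, and the SDK equally accepts its typed models with datetime objects. A sketch of the typed form for trigger runs (resource names are placeholders):

```python
from datetime import datetime, timedelta, timezone

from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
from azure.mgmt.datafactory.models import (
    RunFilterParameters,
    RunQueryFilter,
    RunQueryFilterOperand,
    RunQueryFilterOperator,
)

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")

# Query trigger runs from the last 24 hours for a single trigger.
now = datetime.now(timezone.utc)
filters = RunFilterParameters(
    last_updated_after=now - timedelta(days=1),
    last_updated_before=now,
    filters=[
        RunQueryFilter(
            operand=RunQueryFilterOperand.TRIGGER_NAME,
            operator=RunQueryFilterOperator.EQUALS,
            values=["myTrigger"],
        )
    ],
)
result = client.trigger_runs.query_by_factory(
    resource_group_name="myResourceGroup",
    factory_name="myFactory",
    filter_parameters=filters,
)
for run in result.value:
    print(run.trigger_run_id, run.status)
```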
diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_triggers_operations.py new file mode 100644 index 0000000000000..24be0b35dec45 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_triggers_operations.py @@ -0,0 +1,149 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementTriggersOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_list_by_factory(self, resource_group): + response = self.client.triggers.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_query_by_factory(self, resource_group): + response = self.client.triggers.query_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + filter_parameters={"continuationToken": "str", "parentTriggerName": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_create_or_update(self, resource_group): + response = self.client.triggers.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + trigger_name="str", + trigger={"properties": "trigger", "etag": "str", "id": "str", "name": "str", "type": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get(self, resource_group): + response = self.client.triggers.get( + resource_group_name=resource_group.name, + factory_name="str", + trigger_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_delete(self, resource_group): + response = self.client.triggers.delete( + resource_group_name=resource_group.name, + factory_name="str", + trigger_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_subscribe_to_events(self, resource_group): + response = self.client.triggers.begin_subscribe_to_events( + resource_group_name=resource_group.name, + factory_name="str", + trigger_name="str", + api_version="2018-06-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_get_event_subscription_status(self, resource_group): + response = self.client.triggers.get_event_subscription_status( + resource_group_name=resource_group.name, + factory_name="str", + trigger_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_unsubscribe_from_events(self, resource_group): + response = self.client.triggers.begin_unsubscribe_from_events( + resource_group_name=resource_group.name, + factory_name="str", + trigger_name="str", + api_version="2018-06-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_start(self, resource_group): + response = self.client.triggers.begin_start( + resource_group_name=resource_group.name, + factory_name="str", + trigger_name="str", + api_version="2018-06-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_begin_stop(self, resource_group): + response = self.client.triggers.begin_stop( + resource_group_name=resource_group.name, + factory_name="str", + trigger_name="str", + api_version="2018-06-01", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_triggers_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_triggers_operations_async.py new file mode 100644 index 0000000000000..53e316ff588ed --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_triggers_operations_async.py @@ -0,0 +1,158 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.datafactory.aio import DataFactoryManagementClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestDataFactoryManagementTriggersOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_list_by_factory(self, resource_group): + response = self.client.triggers.list_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + api_version="2018-06-01", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_query_by_factory(self, resource_group): + response = await self.client.triggers.query_by_factory( + resource_group_name=resource_group.name, + factory_name="str", + filter_parameters={"continuationToken": "str", "parentTriggerName": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_create_or_update(self, resource_group): + response = await self.client.triggers.create_or_update( + resource_group_name=resource_group.name, + factory_name="str", + trigger_name="str", + trigger={"properties": "trigger", "etag": "str", "id": "str", "name": "str", "type": "str"}, + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get(self, resource_group): + response = await self.client.triggers.get( + resource_group_name=resource_group.name, + factory_name="str", + trigger_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_delete(self, resource_group): + response = await self.client.triggers.delete( + resource_group_name=resource_group.name, + factory_name="str", + trigger_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_subscribe_to_events(self, resource_group): + response = await ( + await self.client.triggers.begin_subscribe_to_events( + resource_group_name=resource_group.name, + factory_name="str", + trigger_name="str", + api_version="2018-06-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_get_event_subscription_status(self, resource_group): + response = await self.client.triggers.get_event_subscription_status( + resource_group_name=resource_group.name, + factory_name="str", + trigger_name="str", + api_version="2018-06-01", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_unsubscribe_from_events(self, resource_group): + response = await ( + await self.client.triggers.begin_unsubscribe_from_events( + resource_group_name=resource_group.name, + factory_name="str", + trigger_name="str", + api_version="2018-06-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_start(self, resource_group): + response = await ( + await self.client.triggers.begin_start( + resource_group_name=resource_group.name, + factory_name="str", + trigger_name="str", + api_version="2018-06-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_begin_stop(self, resource_group): + response = await ( + await self.client.triggers.begin_stop( + resource_group_name=resource_group.name, + factory_name="str", + trigger_name="str", + api_version="2018-06-01", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... diff --git a/sdk/datafactory/azure-mgmt-datafactory/setup.py b/sdk/datafactory/azure-mgmt-datafactory/setup.py index 59a3dd52eeeab..907fd55998d6c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/setup.py +++ b/sdk/datafactory/azure-mgmt-datafactory/setup.py @@ -75,6 +75,7 @@ }, install_requires=[ "isodate>=0.6.1", + "typing-extensions>=4.6.0", "azure-common>=1.1", "azure-mgmt-core>=1.3.2", ],
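A final note on the calling conventions visible in the trigger tests: the synchronous begin_* methods return a poller directly, while the async variants must be awaited once to obtain the poller and again for its result. A sketch of both shapes, with placeholder names:

```python
import asyncio

from azure.identity import DefaultAzureCredential
from azure.identity.aio import DefaultAzureCredential as AsyncDefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
from azure.mgmt.datafactory.aio import DataFactoryManagementClient as AsyncDataFactoryManagementClient

RG, FACTORY, TRIGGER = "myResourceGroup", "myFactory", "myTrigger"

# Sync: begin_start returns an LROPoller; .result() blocks until the
# service reports a terminal state.
client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")
client.triggers.begin_start(
    resource_group_name=RG, factory_name=FACTORY, trigger_name=TRIGGER
).result()

# Async: one await obtains the AsyncLROPoller and a second awaits its result,
# which is why the generated async tests read
# `await (await self.client.triggers.begin_start(...)).result()`.
async def start_trigger() -> None:
    async with AsyncDataFactoryManagementClient(
        AsyncDefaultAzureCredential(), "<subscription-id>"
    ) as aclient:
        poller = await aclient.triggers.begin_start(
            resource_group_name=RG, factory_name=FACTORY, trigger_name=TRIGGER
        )
        await poller.result()

asyncio.run(start_trigger())
```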