CodeGen from PR 26561 in Azure/azure-rest-api-specs
Merge c058548baf8c963f7c9c43cc6ad03029e615a45c into b13bd252f5a0ae3e870dcd5fb4dc5c1389a7a734
SDKAuto committed Nov 6, 2023
1 parent 86759ff commit 2ba36ee
Showing 89 changed files with 7,784 additions and 2,793 deletions.
10 changes: 5 additions & 5 deletions sdk/datafactory/azure-mgmt-datafactory/_meta.json
@@ -1,11 +1,11 @@
 {
-  "commit": "d70ba571ddb519a63aa2ad96a96faff04647f81c",
+  "commit": "d8e29bac77468d4ec352222b417591047418b7a2",
   "repository_url": "https://github.com/Azure/azure-rest-api-specs",
-  "autorest": "3.9.2",
+  "autorest": "3.9.7",
   "use": [
-    "@autorest/python@6.4.0",
-    "@autorest/modelerfour@4.24.3"
+    "@autorest/python@6.7.1",
+    "@autorest/modelerfour@4.26.2"
   ],
-  "autorest_command": "autorest specification/datafactory/resource-manager/readme.md --generate-sample=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/home/vsts/work/1/azure-sdk-for-python/sdk --use=@autorest/python@6.4.0 --use=@autorest/modelerfour@4.24.3 --version=3.9.2 --version-tolerant=False",
+  "autorest_command": "autorest specification/datafactory/resource-manager/readme.md --generate-sample=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/mnt/vss/_work/1/s/azure-sdk-for-python/sdk --use=@autorest/python@6.7.1 --use=@autorest/modelerfour@4.26.2 --version=3.9.7 --version-tolerant=False",
   "readme": "specification/datafactory/resource-manager/readme.md"
 }
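
As an aside (not part of the commit), the regeneration toolchain can be read straight back out of this file; a minimal sketch, assuming it is run from the repository root:

    import json

    # _meta.json records exactly which generator versions produced the current code.
    with open("sdk/datafactory/azure-mgmt-datafactory/_meta.json") as handle:
        meta = json.load(handle)

    print(meta["autorest"])          # "3.9.7" after this commit
    print(meta["use"])               # ["@autorest/python@6.7.1", "@autorest/modelerfour@4.26.2"]
    print(meta["autorest_command"])  # the exact command the SDK automation ran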
@@ -6,7 +6,6 @@
 # Changes may cause incorrect behavior and will be lost if the code is regenerated.
 # --------------------------------------------------------------------------
 
-import sys
 from typing import Any, TYPE_CHECKING
 
 from azure.core.configuration import Configuration
@@ -15,11 +14,6 @@
 
 from ._version import VERSION
 
-if sys.version_info >= (3, 8):
-    from typing import Literal  # pylint: disable=no-name-in-module, ungrouped-imports
-else:
-    from typing_extensions import Literal  # type: ignore # pylint: disable=ungrouped-imports
-
 if TYPE_CHECKING:
     # pylint: disable=unused-import,ungrouped-imports
     from azure.core.credentials import TokenCredential
@@ -42,7 +36,7 @@ class DataFactoryManagementClientConfiguration(Configuration): # pylint: disabl
 
     def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None:
         super(DataFactoryManagementClientConfiguration, self).__init__(**kwargs)
-        api_version: Literal["2018-06-01"] = kwargs.pop("api_version", "2018-06-01")
+        api_version: str = kwargs.pop("api_version", "2018-06-01")
 
         if credential is None:
             raise ValueError("Parameter 'credential' must not be None.")
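
The only change in this hunk is the annotation: Literal["2018-06-01"] becomes plain str, which better reflects that any api-version string popped from kwargs is accepted at runtime. A standalone sketch of the pattern (an illustrative class, not the generated one; the override value is hypothetical):

    from typing import Any

    class ExampleConfiguration:
        """Illustrative stand-in for the generated configuration class."""

        def __init__(self, **kwargs: Any) -> None:
            # Callers may override the api-version; otherwise the generated default applies.
            api_version: str = kwargs.pop("api_version", "2018-06-01")
            self.api_version = api_version

    print(ExampleConfiguration().api_version)                                   # 2018-06-01
    print(ExampleConfiguration(api_version="2018-06-01-preview").api_version)   # hypothetical override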
@@ -17,6 +17,7 @@
 from ._serialization import Deserializer, Serializer
 from .operations import (
     ActivityRunsOperations,
+    ChangeDataCaptureOperations,
     CredentialOperationsOperations,
     DataFlowDebugSessionOperations,
     DataFlowsOperations,
@@ -103,6 +104,8 @@ class DataFactoryManagementClient: # pylint: disable=client-accepts-api-version
         azure.mgmt.datafactory.operations.PrivateLinkResourcesOperations
     :ivar global_parameters: GlobalParametersOperations operations
     :vartype global_parameters: azure.mgmt.datafactory.operations.GlobalParametersOperations
+    :ivar change_data_capture: ChangeDataCaptureOperations operations
+    :vartype change_data_capture: azure.mgmt.datafactory.operations.ChangeDataCaptureOperations
     :param credential: Credential needed for the client to connect to Azure. Required.
     :type credential: ~azure.core.credentials.TokenCredential
     :param subscription_id: The subscription identifier. Required.
@@ -126,7 +129,7 @@ def __init__(
         self._config = DataFactoryManagementClientConfiguration(
             credential=credential, subscription_id=subscription_id, **kwargs
         )
-        self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+        self._client: ARMPipelineClient = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
 
         client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
         self._serialize = Serializer(client_models)
@@ -178,6 +181,9 @@ def __init__(
         self.global_parameters = GlobalParametersOperations(
             self._client, self._config, self._serialize, self._deserialize
         )
+        self.change_data_capture = ChangeDataCaptureOperations(
+            self._client, self._config, self._serialize, self._deserialize
+        )
 
     def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse:
         """Runs the network request through the client's chained policies.
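
For orientation only (not part of the generated code), a minimal sketch of reaching the newly registered operation group from the sync client; DefaultAzureCredential and the subscription id are placeholders:

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.datafactory import DataFactoryManagementClient

    # Any TokenCredential works here; DefaultAzureCredential is just a convenient choice.
    client = DataFactoryManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="<subscription-id>",
    )

    # Assigned in __init__ above, exactly like the other operation groups.
    cdc_operations = client.change_data_capture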
@@ -629,7 +629,7 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
                 if xml_desc.get("attr", False):
                     if xml_ns:
                         ET.register_namespace(xml_prefix, xml_ns)
-                        xml_name = "{}{}".format(xml_ns, xml_name)
+                        xml_name = "{{{}}}{}".format(xml_ns, xml_name)
                     serialized.set(xml_name, new_attr)  # type: ignore
                     continue
                 if xml_desc.get("text", False):
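
A quick illustration of why the extra braces matter (not from the diff): ElementTree expects namespaced names in Clark notation, {namespace}local, and the doubled braces in the format string are what produce the literal { and }:

    # "{{" and "}}" are escaped braces in str.format, so the namespace gets wrapped as ElementTree expects.
    xml_ns = "http://example.com/ns"  # hypothetical namespace
    xml_name = "item"                 # hypothetical element name

    old_style = "{}{}".format(xml_ns, xml_name)      # 'http://example.com/nsitem' -- namespace and name fused
    new_style = "{{{}}}{}".format(xml_ns, xml_name)  # '{http://example.com/ns}item' -- valid Clark notation
    assert new_style == "{http://example.com/ns}item"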
@@ -662,8 +662,9 @@ def _serialize(self, target_obj, data_type=None, **kwargs):
                         _serialized.update(_new_attr)  # type: ignore
                         _new_attr = _new_attr[k]  # type: ignore
                         _serialized = _serialized[k]
-            except ValueError:
-                continue
+            except ValueError as err:
+                if isinstance(err, SerializationError):
+                    raise
 
         except (AttributeError, KeyError, TypeError) as err:
             msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
@@ -741,6 +742,8 @@ def query(self, name, data, data_type, **kwargs):
         :param data: The data to be serialized.
         :param str data_type: The type to be serialized from.
+        :keyword bool skip_quote: Whether to skip quote the serialized result.
+            Defaults to False.
         :rtype: str
         :raises: TypeError if serialization fails.
         :raises: ValueError if data is None
@@ -749,10 +752,8 @@
         # Treat the list aside, since we don't want to encode the div separator
         if data_type.startswith("["):
             internal_data_type = data_type[1:-1]
-            data = [self.serialize_data(d, internal_data_type, **kwargs) if d is not None else "" for d in data]
-            if not kwargs.get("skip_quote", False):
-                data = [quote(str(d), safe="") for d in data]
-            return str(self.serialize_iter(data, internal_data_type, **kwargs))
+            do_quote = not kwargs.get("skip_quote", False)
+            return str(self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs))
 
         # Not a list, regular serialization
         output = self.serialize_data(data, data_type, **kwargs)
@@ -891,6 +892,8 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs):
             not be None or empty.
         :param str div: If set, this str will be used to combine the elements
             in the iterable into a combined string. Default is 'None'.
+        :keyword bool do_quote: Whether to quote the serialized result of each iterable element.
+            Defaults to False.
         :rtype: list, str
         """
         if isinstance(data, str):
@@ -903,9 +906,14 @@
         for d in data:
            try:
                 serialized.append(self.serialize_data(d, iter_type, **kwargs))
-            except ValueError:
+            except ValueError as err:
+                if isinstance(err, SerializationError):
+                    raise
                 serialized.append(None)
 
+        if kwargs.get("do_quote", False):
+            serialized = ["" if s is None else quote(str(s), safe="") for s in serialized]
+
         if div:
             serialized = ["" if s is None else str(s) for s in serialized]
             serialized = div.join(serialized)
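
To make the query()/serialize_iter() interplay concrete (an illustration, not SDK code): query() now just passes do_quote, and the quoting serialize_iter applies is the same urllib quote call with safe="":

    from urllib.parse import quote

    values = ["a b", "c/d", None]  # hypothetical already-serialized query values

    # What the new do_quote branch does: percent-encode every element, mapping None to "".
    quoted = ["" if v is None else quote(str(v), safe="") for v in values]
    print(quoted)            # ['a%20b', 'c%2Fd', '']
    print(",".join(quoted))  # 'a%20b,c%2Fd,' once a div separator joins them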
@@ -950,7 +958,9 @@ def serialize_dict(self, attr, dict_type, **kwargs):
         for key, value in attr.items():
             try:
                 serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs)
-            except ValueError:
+            except ValueError as err:
+                if isinstance(err, SerializationError):
+                    raise
                 serialized[self.serialize_unicode(key)] = None
 
         if "xml" in serialization_ctxt:
@@ -1271,7 +1281,7 @@ def _extract_name_from_internal_type(internal_type):
     xml_name = internal_type_xml_map.get("name", internal_type.__name__)
     xml_ns = internal_type_xml_map.get("ns", None)
     if xml_ns:
-        xml_name = "{}{}".format(xml_ns, xml_name)
+        xml_name = "{{{}}}{}".format(xml_ns, xml_name)
     return xml_name


@@ -1295,7 +1305,7 @@ def xml_key_extractor(attr, attr_desc, data):
     # Integrate namespace if necessary
     xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None))
     if xml_ns:
-        xml_name = "{}{}".format(xml_ns, xml_name)
+        xml_name = "{{{}}}{}".format(xml_ns, xml_name)
 
     # If it's an attribute, that's simple
     if xml_desc.get("attr", False):
@@ -5,8 +5,6 @@
 # Changes may cause incorrect behavior and will be lost if the code is regenerated.
 # --------------------------------------------------------------------------
 
-from typing import List, cast
-
 from azure.core.pipeline.transport import HttpRequest
 
 
@@ -16,15 +14,3 @@ def _convert_request(request, files=None):
     if files:
         request.set_formdata_body(files)
     return request
-
-
-def _format_url_section(template, **kwargs):
-    components = template.split("/")
-    while components:
-        try:
-            return template.format(**kwargs)
-        except KeyError as key:
-            # Need the cast, as for some reasons "split" is typed as list[str | Any]
-            formatted_components = cast(List[str], template.split("/"))
-            components = [c for c in formatted_components if "{}".format(key.args[0]) not in c]
-            template = "/".join(components)
@@ -6,4 +6,4 @@
 # Changes may cause incorrect behavior and will be lost if the code is regenerated.
 # --------------------------------------------------------------------------
 
-VERSION = "3.1.0"
+VERSION = "1.0.0"
@@ -6,7 +6,6 @@
 # Changes may cause incorrect behavior and will be lost if the code is regenerated.
 # --------------------------------------------------------------------------
 
-import sys
 from typing import Any, TYPE_CHECKING
 
 from azure.core.configuration import Configuration
@@ -15,11 +14,6 @@
 
 from .._version import VERSION
 
-if sys.version_info >= (3, 8):
-    from typing import Literal  # pylint: disable=no-name-in-module, ungrouped-imports
-else:
-    from typing_extensions import Literal  # type: ignore # pylint: disable=ungrouped-imports
-
 if TYPE_CHECKING:
     # pylint: disable=unused-import,ungrouped-imports
     from azure.core.credentials_async import AsyncTokenCredential
@@ -42,7 +36,7 @@ class DataFactoryManagementClientConfiguration(Configuration): # pylint: disabl
 
     def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None:
         super(DataFactoryManagementClientConfiguration, self).__init__(**kwargs)
-        api_version: Literal["2018-06-01"] = kwargs.pop("api_version", "2018-06-01")
+        api_version: str = kwargs.pop("api_version", "2018-06-01")
 
         if credential is None:
             raise ValueError("Parameter 'credential' must not be None.")
@@ -17,6 +17,7 @@
 from ._configuration import DataFactoryManagementClientConfiguration
 from .operations import (
     ActivityRunsOperations,
+    ChangeDataCaptureOperations,
     CredentialOperationsOperations,
     DataFlowDebugSessionOperations,
     DataFlowsOperations,
@@ -104,6 +105,8 @@ class DataFactoryManagementClient: # pylint: disable=client-accepts-api-version
         azure.mgmt.datafactory.aio.operations.PrivateLinkResourcesOperations
     :ivar global_parameters: GlobalParametersOperations operations
     :vartype global_parameters: azure.mgmt.datafactory.aio.operations.GlobalParametersOperations
+    :ivar change_data_capture: ChangeDataCaptureOperations operations
+    :vartype change_data_capture: azure.mgmt.datafactory.aio.operations.ChangeDataCaptureOperations
     :param credential: Credential needed for the client to connect to Azure. Required.
     :type credential: ~azure.core.credentials_async.AsyncTokenCredential
     :param subscription_id: The subscription identifier. Required.
@@ -127,7 +130,7 @@ def __init__(
         self._config = DataFactoryManagementClientConfiguration(
             credential=credential, subscription_id=subscription_id, **kwargs
         )
-        self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+        self._client: AsyncARMPipelineClient = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
 
         client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
         self._serialize = Serializer(client_models)
@@ -179,6 +182,9 @@ def __init__(
         self.global_parameters = GlobalParametersOperations(
             self._client, self._config, self._serialize, self._deserialize
         )
+        self.change_data_capture = ChangeDataCaptureOperations(
+            self._client, self._config, self._serialize, self._deserialize
+        )
 
     def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]:
         """Runs the network request through the client's chained policies.
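
Mirroring the sync client, a minimal sketch of the async surface (placeholders throughout; assumes the usual async-context-manager support of generated clients and azure-identity's aio credential):

    import asyncio

    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.datafactory.aio import DataFactoryManagementClient

    async def main() -> None:
        async with DefaultAzureCredential() as credential:
            async with DataFactoryManagementClient(credential, "<subscription-id>") as client:
                # Wired up in __init__ above, alongside the other operation groups.
                cdc_operations = client.change_data_capture

    asyncio.run(main())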
@@ -28,6 +28,7 @@
 from ._private_endpoint_connection_operations import PrivateEndpointConnectionOperations
 from ._private_link_resources_operations import PrivateLinkResourcesOperations
 from ._global_parameters_operations import GlobalParametersOperations
+from ._change_data_capture_operations import ChangeDataCaptureOperations
 
 from ._patch import __all__ as _patch_all
 from ._patch import *  # pylint: disable=unused-wildcard-import
@@ -56,6 +57,7 @@
     "PrivateEndpointConnectionOperations",
     "PrivateLinkResourcesOperations",
     "GlobalParametersOperations",
+    "ChangeDataCaptureOperations",
 ]
 __all__.extend([p for p in _patch_all if p not in __all__])
 _patch_sdk()
@@ -6,7 +6,7 @@
 # Code generated by Microsoft (R) AutoRest Code Generator.
 # Changes may cause incorrect behavior and will be lost if the code is regenerated.
 # --------------------------------------------------------------------------
-import sys
+from io import IOBase
 from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload
 
 from azure.core.exceptions import (
@@ -28,10 +28,6 @@
 from ..._vendor import _convert_request
 from ...operations._activity_runs_operations import build_query_by_pipeline_run_request
 
-if sys.version_info >= (3, 8):
-    from typing import Literal  # pylint: disable=no-name-in-module, ungrouped-imports
-else:
-    from typing_extensions import Literal  # type: ignore # pylint: disable=ungrouped-imports
 T = TypeVar("T")
 ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
 
@@ -154,16 +150,14 @@ async def query_by_pipeline_run(
         _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
         _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
 
-        api_version: Literal["2018-06-01"] = kwargs.pop(
-            "api_version", _params.pop("api-version", self._config.api_version)
-        )
+        api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
         content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
         cls: ClsType[_models.ActivityRunsQueryResponse] = kwargs.pop("cls", None)
 
         content_type = content_type or "application/json"
         _json = None
         _content = None
-        if isinstance(filter_parameters, (IO, bytes)):
+        if isinstance(filter_parameters, (IOBase, bytes)):
             _content = filter_parameters
         else:
             _json = self._serialize.body(filter_parameters, "RunFilterParameters")
@@ -184,8 +178,9 @@ request = _convert_request(request)
         request = _convert_request(request)
         request.url = self._client.format_url(request.url)
 
+        _stream = False
         pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
-            request, stream=False, **kwargs
+            request, stream=_stream, **kwargs
         )
 
         response = pipeline_response.http_response
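
The isinstance switch from typing.IO to io.IOBase matters because concrete streams do not inherit from typing.IO, so raw file-like bodies previously fell through to the model-serialization branch; a small illustration (the payload is hypothetical):

    import io
    from typing import IO

    body = io.BytesIO(b'{"filters": []}')  # hypothetical raw request payload

    print(isinstance(body, IO))         # False: typing.IO is not an ancestor of concrete streams
    print(isinstance(body, io.IOBase))  # True: io.IOBase is, so the new check recognizes the stream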