From df3ceb41c42fc8885011b5a2a75d4cf5fcfef291 Mon Sep 17 00:00:00 2001 From: SDKAuto Date: Mon, 6 Nov 2023 05:46:45 +0000 Subject: [PATCH] CodeGen from PR 26561 in Azure/azure-rest-api-specs Merge ec49e5c5b828a2a10e201d6705b2ffa38f022883 into b13bd252f5a0ae3e870dcd5fb4dc5c1389a7a734 --- .../azure-mgmt-datafactory/_meta.json | 10 +- .../azure/mgmt/datafactory/_configuration.py | 8 +- .../_data_factory_management_client.py | 8 +- .../azure/mgmt/datafactory/_serialization.py | 32 +- .../azure/mgmt/datafactory/_vendor.py | 14 - .../azure/mgmt/datafactory/_version.py | 2 +- .../mgmt/datafactory/aio/_configuration.py | 8 +- .../aio/_data_factory_management_client.py | 8 +- .../datafactory/aio/operations/__init__.py | 2 + .../operations/_activity_runs_operations.py | 15 +- .../_change_data_capture_operations.py | 648 ++ .../_credential_operations_operations.py | 36 +- .../_data_flow_debug_session_operations.py | 57 +- .../aio/operations/_data_flows_operations.py | 36 +- .../aio/operations/_datasets_operations.py | 36 +- .../_exposure_control_operations.py | 33 +- .../aio/operations/_factories_operations.py | 79 +- .../_global_parameters_operations.py | 36 +- .../_integration_runtime_nodes_operations.py | 36 +- ...tion_runtime_object_metadata_operations.py | 26 +- .../_integration_runtimes_operations.py | 143 +- .../operations/_linked_services_operations.py | 36 +- .../_managed_private_endpoints_operations.py | 36 +- .../_managed_virtual_networks_operations.py | 29 +- .../datafactory/aio/operations/_operations.py | 12 +- .../operations/_pipeline_runs_operations.py | 29 +- .../aio/operations/_pipelines_operations.py | 44 +- ...rivate_end_point_connections_operations.py | 12 +- ..._private_endpoint_connection_operations.py | 29 +- .../_private_link_resources_operations.py | 12 +- .../operations/_trigger_runs_operations.py | 29 +- .../aio/operations/_triggers_operations.py | 96 +- .../azure/mgmt/datafactory/models/__init__.py | 66 +- 
.../_data_factory_management_client_enums.py | 62 +- .../mgmt/datafactory/models/_models_py3.py | 5213 ++++++++++++----- .../mgmt/datafactory/operations/__init__.py | 2 + .../operations/_activity_runs_operations.py | 21 +- .../_change_data_capture_operations.py | 986 ++++ .../_credential_operations_operations.py | 54 +- .../_data_flow_debug_session_operations.py | 79 +- .../operations/_data_flows_operations.py | 54 +- .../operations/_datasets_operations.py | 54 +- .../_exposure_control_operations.py | 47 +- .../operations/_factories_operations.py | 117 +- .../_global_parameters_operations.py | 54 +- .../_integration_runtime_nodes_operations.py | 54 +- ...tion_runtime_object_metadata_operations.py | 36 +- .../_integration_runtimes_operations.py | 213 +- .../operations/_linked_services_operations.py | 54 +- .../_managed_private_endpoints_operations.py | 54 +- .../_managed_virtual_networks_operations.py | 43 +- .../datafactory/operations/_operations.py | 14 +- .../operations/_pipeline_runs_operations.py | 43 +- .../operations/_pipelines_operations.py | 66 +- ...rivate_end_point_connections_operations.py | 18 +- ..._private_endpoint_connection_operations.py | 43 +- .../_private_link_resources_operations.py | 18 +- .../operations/_trigger_runs_operations.py | 43 +- .../operations/_triggers_operations.py | 138 +- .../change_data_capture_create.py | 441 ++ .../change_data_capture_delete.py | 41 + .../change_data_capture_get.py | 42 + .../change_data_capture_list_by_factory.py | 42 + .../change_data_capture_start.py | 41 + .../change_data_capture_status.py | 42 + .../change_data_capture_stop.py | 41 + .../change_data_capture_update.py | 513 ++ .../generated_samples/credentials_delete.py | 3 +- .../data_flow_debug_session_delete.py | 3 +- .../generated_samples/data_flows_delete.py | 3 +- .../generated_samples/datasets_delete.py | 3 +- .../delete_private_endpoint_connection.py | 3 +- .../generated_samples/factories_delete.py | 3 +- .../global_parameters_delete.py | 3 +- 
.../integration_runtime_nodes_delete.py | 3 +- .../integration_runtimes_delete.py | 3 +- .../integration_runtimes_remove_links.py | 3 +- .../integration_runtimes_stop.py | 3 +- .../integration_runtimes_sync_credentials.py | 3 +- .../integration_runtimes_upgrade.py | 3 +- .../linked_services_delete.py | 3 +- .../managed_private_endpoints_delete.py | 3 +- .../generated_samples/pipeline_runs_cancel.py | 3 +- .../generated_samples/pipelines_delete.py | 3 +- .../generated_samples/trigger_runs_cancel.py | 3 +- .../generated_samples/trigger_runs_rerun.py | 3 +- .../generated_samples/triggers_delete.py | 3 +- .../generated_samples/triggers_start.py | 3 +- .../generated_samples/triggers_stop.py | 3 +- 89 files changed, 7748 insertions(+), 2804 deletions(-) create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_change_data_capture_operations.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_change_data_capture_operations.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_create.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_delete.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_get.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_list_by_factory.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_start.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_status.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_stop.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_update.py diff --git a/sdk/datafactory/azure-mgmt-datafactory/_meta.json b/sdk/datafactory/azure-mgmt-datafactory/_meta.json index 
71e5b746f1bc..446cb1645879 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/_meta.json +++ b/sdk/datafactory/azure-mgmt-datafactory/_meta.json @@ -1,11 +1,11 @@ { - "commit": "d70ba571ddb519a63aa2ad96a96faff04647f81c", + "commit": "67165fb1a99aa558dbd52e53d11dff6de9997769", "repository_url": "https://github.com/Azure/azure-rest-api-specs", - "autorest": "3.9.2", + "autorest": "3.9.7", "use": [ - "@autorest/python@6.4.0", - "@autorest/modelerfour@4.24.3" + "@autorest/python@6.7.1", + "@autorest/modelerfour@4.26.2" ], - "autorest_command": "autorest specification/datafactory/resource-manager/readme.md --generate-sample=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/home/vsts/work/1/azure-sdk-for-python/sdk --use=@autorest/python@6.4.0 --use=@autorest/modelerfour@4.24.3 --version=3.9.2 --version-tolerant=False", + "autorest_command": "autorest specification/datafactory/resource-manager/readme.md --generate-sample=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/mnt/vss/_work/1/s/azure-sdk-for-python/sdk --use=@autorest/python@6.7.1 --use=@autorest/modelerfour@4.26.2 --version=3.9.7 --version-tolerant=False", "readme": "specification/datafactory/resource-manager/readme.md" } \ No newline at end of file diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_configuration.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_configuration.py index d10dbe995788..77c8fb24db69 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_configuration.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_configuration.py @@ -6,7 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys from typing import Any, TYPE_CHECKING from azure.core.configuration import Configuration @@ -15,11 +14,6 @@ from ._version import VERSION -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports - if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials import TokenCredential @@ -42,7 +36,7 @@ class DataFactoryManagementClientConfiguration(Configuration): # pylint: disabl def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None: super(DataFactoryManagementClientConfiguration, self).__init__(**kwargs) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", "2018-06-01") + api_version: str = kwargs.pop("api_version", "2018-06-01") if credential is None: raise ValueError("Parameter 'credential' must not be None.") diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py index 599cf12f79b1..8c7a2f18d7e2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py @@ -17,6 +17,7 @@ from ._serialization import Deserializer, Serializer from .operations import ( ActivityRunsOperations, + ChangeDataCaptureOperations, CredentialOperationsOperations, DataFlowDebugSessionOperations, DataFlowsOperations, @@ -103,6 +104,8 @@ class DataFactoryManagementClient: # pylint: disable=client-accepts-api-version azure.mgmt.datafactory.operations.PrivateLinkResourcesOperations :ivar global_parameters: GlobalParametersOperations operations :vartype 
global_parameters: azure.mgmt.datafactory.operations.GlobalParametersOperations + :ivar change_data_capture: ChangeDataCaptureOperations operations + :vartype change_data_capture: azure.mgmt.datafactory.operations.ChangeDataCaptureOperations :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The subscription identifier. Required. @@ -126,7 +129,7 @@ def __init__( self._config = DataFactoryManagementClientConfiguration( credential=credential, subscription_id=subscription_id, **kwargs ) - self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + self._client: ARMPipelineClient = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) @@ -178,6 +181,9 @@ def __init__( self.global_parameters = GlobalParametersOperations( self._client, self._config, self._serialize, self._deserialize ) + self.change_data_capture = ChangeDataCaptureOperations( + self._client, self._config, self._serialize, self._deserialize + ) def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse: """Runs the network request through the client's chained policies. 
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_serialization.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_serialization.py index f17c068e833e..4bae2292227b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_serialization.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_serialization.py @@ -629,7 +629,7 @@ def _serialize(self, target_obj, data_type=None, **kwargs): if xml_desc.get("attr", False): if xml_ns: ET.register_namespace(xml_prefix, xml_ns) - xml_name = "{}{}".format(xml_ns, xml_name) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) serialized.set(xml_name, new_attr) # type: ignore continue if xml_desc.get("text", False): @@ -662,8 +662,9 @@ def _serialize(self, target_obj, data_type=None, **kwargs): _serialized.update(_new_attr) # type: ignore _new_attr = _new_attr[k] # type: ignore _serialized = _serialized[k] - except ValueError: - continue + except ValueError as err: + if isinstance(err, SerializationError): + raise except (AttributeError, KeyError, TypeError) as err: msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) @@ -741,6 +742,8 @@ def query(self, name, data, data_type, **kwargs): :param data: The data to be serialized. :param str data_type: The type to be serialized from. + :keyword bool skip_quote: Whether to skip quote the serialized result. + Defaults to False. :rtype: str :raises: TypeError if serialization fails. 
:raises: ValueError if data is None @@ -749,10 +752,8 @@ def query(self, name, data, data_type, **kwargs): # Treat the list aside, since we don't want to encode the div separator if data_type.startswith("["): internal_data_type = data_type[1:-1] - data = [self.serialize_data(d, internal_data_type, **kwargs) if d is not None else "" for d in data] - if not kwargs.get("skip_quote", False): - data = [quote(str(d), safe="") for d in data] - return str(self.serialize_iter(data, internal_data_type, **kwargs)) + do_quote = not kwargs.get("skip_quote", False) + return str(self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs)) # Not a list, regular serialization output = self.serialize_data(data, data_type, **kwargs) @@ -891,6 +892,8 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs): not be None or empty. :param str div: If set, this str will be used to combine the elements in the iterable into a combined string. Default is 'None'. + :keyword bool do_quote: Whether to quote the serialized result of each iterable element. + Defaults to False. 
:rtype: list, str """ if isinstance(data, str): @@ -903,9 +906,14 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs): for d in data: try: serialized.append(self.serialize_data(d, iter_type, **kwargs)) - except ValueError: + except ValueError as err: + if isinstance(err, SerializationError): + raise serialized.append(None) + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + if div: serialized = ["" if s is None else str(s) for s in serialized] serialized = div.join(serialized) @@ -950,7 +958,9 @@ def serialize_dict(self, attr, dict_type, **kwargs): for key, value in attr.items(): try: serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) - except ValueError: + except ValueError as err: + if isinstance(err, SerializationError): + raise serialized[self.serialize_unicode(key)] = None if "xml" in serialization_ctxt: @@ -1271,7 +1281,7 @@ def _extract_name_from_internal_type(internal_type): xml_name = internal_type_xml_map.get("name", internal_type.__name__) xml_ns = internal_type_xml_map.get("ns", None) if xml_ns: - xml_name = "{}{}".format(xml_ns, xml_name) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) return xml_name @@ -1295,7 +1305,7 @@ def xml_key_extractor(attr, attr_desc, data): # Integrate namespace if necessary xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) if xml_ns: - xml_name = "{}{}".format(xml_ns, xml_name) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) # If it's an attribute, that's simple if xml_desc.get("attr", False): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_vendor.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_vendor.py index bd0df84f5319..0dafe0e287ff 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_vendor.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_vendor.py @@ -5,8 +5,6 @@ # Changes may cause 
incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import List, cast - from azure.core.pipeline.transport import HttpRequest @@ -16,15 +14,3 @@ def _convert_request(request, files=None): if files: request.set_formdata_body(files) return request - - -def _format_url_section(template, **kwargs): - components = template.split("/") - while components: - try: - return template.format(**kwargs) - except KeyError as key: - # Need the cast, as for some reasons "split" is typed as list[str | Any] - formatted_components = cast(List[str], template.split("/")) - components = [c for c in formatted_components if "{}".format(key.args[0]) not in c] - template = "/".join(components) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py index 47babc28d5ed..c47f66669f1b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "3.1.0" +VERSION = "1.0.0" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_configuration.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_configuration.py index d157b8f5b7e0..23f2cc2228c0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_configuration.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_configuration.py @@ -6,7 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys from typing import Any, TYPE_CHECKING from azure.core.configuration import Configuration @@ -15,11 +14,6 @@ from .._version import VERSION -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports - if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential @@ -42,7 +36,7 @@ class DataFactoryManagementClientConfiguration(Configuration): # pylint: disabl def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None: super(DataFactoryManagementClientConfiguration, self).__init__(**kwargs) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", "2018-06-01") + api_version: str = kwargs.pop("api_version", "2018-06-01") if credential is None: raise ValueError("Parameter 'credential' must not be None.") diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py index 0a1459a7f683..4076603e5dbc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py @@ -17,6 +17,7 @@ from ._configuration import DataFactoryManagementClientConfiguration from .operations import ( ActivityRunsOperations, + ChangeDataCaptureOperations, CredentialOperationsOperations, DataFlowDebugSessionOperations, DataFlowsOperations, @@ -104,6 +105,8 @@ class DataFactoryManagementClient: # pylint: disable=client-accepts-api-version azure.mgmt.datafactory.aio.operations.PrivateLinkResourcesOperations :ivar global_parameters: 
GlobalParametersOperations operations :vartype global_parameters: azure.mgmt.datafactory.aio.operations.GlobalParametersOperations + :ivar change_data_capture: ChangeDataCaptureOperations operations + :vartype change_data_capture: azure.mgmt.datafactory.aio.operations.ChangeDataCaptureOperations :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The subscription identifier. Required. @@ -127,7 +130,7 @@ def __init__( self._config = DataFactoryManagementClientConfiguration( credential=credential, subscription_id=subscription_id, **kwargs ) - self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + self._client: AsyncARMPipelineClient = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) @@ -179,6 +182,9 @@ def __init__( self.global_parameters = GlobalParametersOperations( self._client, self._config, self._serialize, self._deserialize ) + self.change_data_capture = ChangeDataCaptureOperations( + self._client, self._config, self._serialize, self._deserialize + ) def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]: """Runs the network request through the client's chained policies. 
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/__init__.py index b4c9fa93e0a9..668131aae0e5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/__init__.py @@ -28,6 +28,7 @@ from ._private_endpoint_connection_operations import PrivateEndpointConnectionOperations from ._private_link_resources_operations import PrivateLinkResourcesOperations from ._global_parameters_operations import GlobalParametersOperations +from ._change_data_capture_operations import ChangeDataCaptureOperations from ._patch import __all__ as _patch_all from ._patch import * # pylint: disable=unused-wildcard-import @@ -56,6 +57,7 @@ "PrivateEndpointConnectionOperations", "PrivateLinkResourcesOperations", "GlobalParametersOperations", + "ChangeDataCaptureOperations", ] __all__.extend([p for p in _patch_all if p not in __all__]) _patch_sdk() diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_activity_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_activity_runs_operations.py index d987ee7f21c7..77f97edd8eb6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_activity_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_activity_runs_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload from azure.core.exceptions import ( @@ -28,10 +28,6 @@ from ..._vendor import _convert_request from ...operations._activity_runs_operations import build_query_by_pipeline_run_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -154,16 +150,14 @@ async def query_by_pipeline_run( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ActivityRunsQueryResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(filter_parameters, (IO, bytes)): + if isinstance(filter_parameters, (IOBase, bytes)): _content = filter_parameters else: _json = self._serialize.body(filter_parameters, "RunFilterParameters") @@ -184,8 +178,9 @@ async def query_by_pipeline_run( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_change_data_capture_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_change_data_capture_operations.py new file mode 100644 index 000000000000..78abc5ac2961 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_change_data_capture_operations.py @@ -0,0 +1,648 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... 
import models as _models +from ..._vendor import _convert_request +from ...operations._change_data_capture_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_by_factory_request, + build_start_request, + build_status_request, + build_stop_request, +) + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class ChangeDataCaptureOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.datafactory.aio.DataFactoryManagementClient`'s + :attr:`change_data_capture` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list_by_factory( + self, resource_group_name: str, factory_name: str, **kwargs: Any + ) -> AsyncIterable["_models.ChangeDataCaptureResource"]: + """Lists all resources of type change data capture. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ChangeDataCaptureResource or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.ChangeDataCaptureResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ChangeDataCaptureListResponse] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_by_factory_request( + resource_group_name=resource_group_name, + factory_name=factory_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_by_factory.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + async def extract_data(pipeline_response): + 
deserialized = self._deserialize("ChangeDataCaptureListResponse", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list_by_factory.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs" + } + + @overload + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + change_data_capture_name: str, + change_data_capture: _models.ChangeDataCaptureResource, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ChangeDataCaptureResource: + """Creates or updates a change data capture resource. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param change_data_capture_name: The change data capture name. Required. + :type change_data_capture_name: str + :param change_data_capture: Change data capture resource definition. Required. + :type change_data_capture: ~azure.mgmt.datafactory.models.ChangeDataCaptureResource + :param if_match: ETag of the change data capture entity. 
Should only be specified for update, + for which it should match existing entity or can be * for unconditional update. Default value + is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ChangeDataCaptureResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ChangeDataCaptureResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + change_data_capture_name: str, + change_data_capture: IO, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ChangeDataCaptureResource: + """Creates or updates a change data capture resource. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param change_data_capture_name: The change data capture name. Required. + :type change_data_capture_name: str + :param change_data_capture: Change data capture resource definition. Required. + :type change_data_capture: IO + :param if_match: ETag of the change data capture entity. Should only be specified for update, + for which it should match existing entity or can be * for unconditional update. Default value + is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ChangeDataCaptureResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ChangeDataCaptureResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + change_data_capture_name: str, + change_data_capture: Union[_models.ChangeDataCaptureResource, IO], + if_match: Optional[str] = None, + **kwargs: Any + ) -> _models.ChangeDataCaptureResource: + """Creates or updates a change data capture resource. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param change_data_capture_name: The change data capture name. Required. + :type change_data_capture_name: str + :param change_data_capture: Change data capture resource definition. Is either a + ChangeDataCaptureResource type or a IO type. Required. + :type change_data_capture: ~azure.mgmt.datafactory.models.ChangeDataCaptureResource or IO + :param if_match: ETag of the change data capture entity. Should only be specified for update, + for which it should match existing entity or can be * for unconditional update. Default value + is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ChangeDataCaptureResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ChangeDataCaptureResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ChangeDataCaptureResource] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(change_data_capture, (IOBase, bytes)): + _content = change_data_capture + else: + _json = self._serialize.body(change_data_capture, "ChangeDataCaptureResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + factory_name=factory_name, + change_data_capture_name=change_data_capture_name, + subscription_id=self._config.subscription_id, + if_match=if_match, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ChangeDataCaptureResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}" + } + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + factory_name: str, + change_data_capture_name: str, + if_none_match: Optional[str] = None, + **kwargs: Any + ) -> _models.ChangeDataCaptureResource: + """Gets a change data capture. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param change_data_capture_name: The change data capture name. Required. + :type change_data_capture_name: str + :param if_none_match: ETag of the change data capture entity. Should only be specified for get. + If the ETag matches the existing entity tag, or if * was provided, then no content will be + returned. Default value is None. 
+ :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ChangeDataCaptureResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ChangeDataCaptureResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ChangeDataCaptureResource] = kwargs.pop("cls", None) + + request = build_get_request( + resource_group_name=resource_group_name, + factory_name=factory_name, + change_data_capture_name=change_data_capture_name, + subscription_id=self._config.subscription_id, + if_none_match=if_none_match, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ChangeDataCaptureResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}" + } + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, factory_name: str, change_data_capture_name: str, **kwargs: Any + ) -> None: + """Deletes a change data capture. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param change_data_capture_name: The change data capture name. Required. + :type change_data_capture_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + factory_name=factory_name, + change_data_capture_name=change_data_capture_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + 
+ if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}" + } + + @distributed_trace_async + async def start( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, factory_name: str, change_data_capture_name: str, **kwargs: Any + ) -> None: + """Starts a change data capture. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param change_data_capture_name: The change data capture name. Required. + :type change_data_capture_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_start_request( + resource_group_name=resource_group_name, + factory_name=factory_name, + change_data_capture_name=change_data_capture_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.start.metadata["url"], + headers=_headers, + params=_params, + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + start.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}/start" + } + + @distributed_trace_async + async def stop( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, factory_name: str, change_data_capture_name: str, **kwargs: Any + ) -> None: + """Stops a change data capture. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param change_data_capture_name: The change data capture name. Required. 
+ :type change_data_capture_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_stop_request( + resource_group_name=resource_group_name, + factory_name=factory_name, + change_data_capture_name=change_data_capture_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.stop.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + stop.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}/stop" + } + + @distributed_trace_async + async def status( + self, resource_group_name: str, factory_name: str, change_data_capture_name: str, **kwargs: Any + ) -> str: + """Gets the current status for the change data 
capture resource. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param change_data_capture_name: The change data capture name. Required. + :type change_data_capture_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: str or the result of cls(response) + :rtype: str + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[str] = kwargs.pop("cls", None) + + request = build_status_request( + resource_group_name=resource_group_name, + factory_name=factory_name, + change_data_capture_name=change_data_capture_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.status.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("str", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + status.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}/status" + } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_credential_operations_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_credential_operations_operations.py index f53930e95001..3148130e0c97 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_credential_operations_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_credential_operations_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload import urllib.parse @@ -36,10 +36,6 @@ build_list_by_factory_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -83,9 +79,7 @@ def list_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.CredentialListResponse] = kwargs.pop("cls", None) error_map = { @@ -139,8 +133,9 @@ async def extract_data(pipeline_response): async def 
get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -267,16 +262,14 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ManagedIdentityCredentialResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(credential, (IO, bytes)): + if isinstance(credential, (IOBase, bytes)): _content = credential else: _json = self._serialize.body(credential, "ManagedIdentityCredentialResource") @@ -298,8 +291,9 @@ async def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -356,9 +350,7 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[Optional[_models.ManagedIdentityCredentialResource]] = kwargs.pop("cls", None) request 
= build_get_request( @@ -375,8 +367,9 @@ async def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -426,9 +419,7 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -444,8 +435,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flow_debug_session_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flow_debug_session_operations.py index f392ab11cbe2..4768d96a0a33 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flow_debug_session_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flow_debug_session_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload import urllib.parse @@ -39,10 +39,6 @@ build_query_by_factory_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -84,16 +80,14 @@ async def _create_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[Optional[_models.CreateDataFlowDebugSessionResponse]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(request, (IO, bytes)): + if isinstance(request, (IOBase, bytes)): _content = request else: _json = self._serialize.body(request, "CreateDataFlowDebugSessionRequest") @@ -113,8 +107,9 @@ async def _create_initial( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -249,9 +244,7 @@ async def begin_create( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.CreateDataFlowDebugSessionResponse] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) @@ -316,9 +309,7 @@ def query_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.QueryDataFlowDebugSessionsResponse] = kwargs.pop("cls", None) error_map = { @@ -372,8 +363,9 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -479,16 +471,14 @@ async def add_data_flow( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.AddDataFlowToDebugSessionResponse] = kwargs.pop("cls", None) content_type = content_type or 
"application/json" _json = None _content = None - if isinstance(request, (IO, bytes)): + if isinstance(request, (IOBase, bytes)): _content = request else: _json = self._serialize.body(request, "DataFlowDebugPackage") @@ -508,8 +498,9 @@ async def add_data_flow( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -619,16 +610,14 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[None] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(request, (IO, bytes)): + if isinstance(request, (IOBase, bytes)): _content = request else: _json = self._serialize.body(request, "DeleteDataFlowDebugSessionRequest") @@ -648,8 +637,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -683,16 +673,14 @@ async def _execute_command_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[Optional[_models.DataFlowDebugCommandResponse]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(request, (IO, bytes)): + if isinstance(request, (IOBase, bytes)): _content = request else: _json = self._serialize.body(request, "DataFlowDebugCommandRequest") @@ -712,8 +700,9 @@ async def _execute_command_initial( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -848,9 +837,7 @@ async def begin_execute_command( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DataFlowDebugCommandResponse] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flows_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flows_operations.py index 
bc4a273209ca..b42b7355a6f4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flows_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_data_flows_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload import urllib.parse @@ -36,10 +36,6 @@ build_list_by_factory_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -174,16 +170,14 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DataFlowResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(data_flow, (IO, bytes)): + if isinstance(data_flow, (IOBase, bytes)): _content = data_flow else: _json = self._serialize.body(data_flow, "DataFlowResource") @@ -205,8 +199,9 @@ async def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False 
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -263,9 +258,7 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DataFlowResource] = kwargs.pop("cls", None) request = build_get_request( @@ -282,8 +275,9 @@ async def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -331,9 +325,7 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -349,8 +341,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -385,9 +378,7 @@ def list_by_factory( 
_headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DataFlowListResponse] = kwargs.pop("cls", None) error_map = { @@ -441,8 +432,9 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_datasets_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_datasets_operations.py index f8c4ede50605..b6437d029ed5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_datasets_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_datasets_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload import urllib.parse @@ -36,10 +36,6 @@ build_list_by_factory_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -81,9 +77,7 @@ def list_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DatasetListResponse] = kwargs.pop("cls", None) error_map = { @@ -137,8 +131,9 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -265,16 +260,14 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DatasetResource] = 
kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(dataset, (IO, bytes)): + if isinstance(dataset, (IOBase, bytes)): _content = dataset else: _json = self._serialize.body(dataset, "DatasetResource") @@ -296,8 +289,9 @@ async def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -354,9 +348,7 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[Optional[_models.DatasetResource]] = kwargs.pop("cls", None) request = build_get_request( @@ -373,8 +365,9 @@ async def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -424,9 +417,7 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -442,8 +433,9 @@ async 
def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_exposure_control_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_exposure_control_operations.py index b46bab9b9d2f..e24d45d54b98 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_exposure_control_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_exposure_control_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload from azure.core.exceptions import ( @@ -32,10 +32,6 @@ build_query_feature_values_by_factory_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -132,16 +128,14 @@ async def get_feature_value( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ExposureControlResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(exposure_control_request, (IO, bytes)): + if isinstance(exposure_control_request, (IOBase, bytes)): _content = exposure_control_request else: _json = self._serialize.body(exposure_control_request, "ExposureControlRequest") @@ -160,8 +154,9 @@ async def get_feature_value( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -271,16 +266,14 @@ async def get_feature_value_by_factory( _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ExposureControlResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(exposure_control_request, (IO, bytes)): + if isinstance(exposure_control_request, (IOBase, bytes)): _content = exposure_control_request else: _json = self._serialize.body(exposure_control_request, "ExposureControlRequest") @@ -300,8 +293,9 @@ async def get_feature_value_by_factory( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -415,16 +409,14 @@ async def query_feature_values_by_factory( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ExposureControlBatchResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(exposure_control_batch_request, (IO, bytes)): + if isinstance(exposure_control_batch_request, 
(IOBase, bytes)): _content = exposure_control_batch_request else: _json = self._serialize.body(exposure_control_batch_request, "ExposureControlBatchRequest") @@ -444,8 +436,9 @@ async def query_feature_values_by_factory( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py index e035c980ee5c..814bec939053 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload import urllib.parse @@ -41,10 +41,6 @@ build_update_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -80,9 +76,7 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Factory"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.FactoryListResponse] = kwargs.pop("cls", None) error_map = { @@ -134,8 +128,9 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -222,16 +217,14 @@ async def configure_factory_repo( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: 
ClsType[_models.Factory] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(factory_repo_update, (IO, bytes)): + if isinstance(factory_repo_update, (IOBase, bytes)): _content = factory_repo_update else: _json = self._serialize.body(factory_repo_update, "FactoryRepoUpdate") @@ -250,8 +243,9 @@ async def configure_factory_repo( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -285,9 +279,7 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Asy _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.FactoryListResponse] = kwargs.pop("cls", None) error_map = { @@ -340,8 +332,9 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -458,16 +451,14 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Factory] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(factory, (IO, bytes)): + if isinstance(factory, (IOBase, bytes)): _content = factory else: _json = self._serialize.body(factory, "Factory") @@ -488,8 +479,9 @@ async def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -599,16 +591,14 @@ async def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Factory] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(factory_update_parameters, (IO, bytes)): + if isinstance(factory_update_parameters, (IOBase, bytes)): _content = factory_update_parameters else: _json = self._serialize.body(factory_update_parameters, "FactoryUpdateParameters") @@ -628,8 +618,9 @@ async def update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = 
pipeline_response.http_response @@ -679,9 +670,7 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[Optional[_models.Factory]] = kwargs.pop("cls", None) request = build_get_request( @@ -697,8 +686,9 @@ async def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -746,9 +736,7 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -763,8 +751,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -871,16 +860,14 @@ async def get_git_hub_access_token( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = 
kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.GitHubAccessTokenResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(git_hub_access_token_request, (IO, bytes)): + if isinstance(git_hub_access_token_request, (IOBase, bytes)): _content = git_hub_access_token_request else: _json = self._serialize.body(git_hub_access_token_request, "GitHubAccessTokenRequest") @@ -900,8 +887,9 @@ async def get_git_hub_access_token( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1007,16 +995,14 @@ async def get_data_plane_access( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.AccessPolicyResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(policy, (IO, bytes)): + if isinstance(policy, (IOBase, bytes)): _content = policy else: _json = self._serialize.body(policy, "UserAccessPolicy") @@ -1036,8 +1022,9 @@ async def get_data_plane_access( request = _convert_request(request) request.url = 
self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_global_parameters_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_global_parameters_operations.py index 4a8e51af7706..88e5b1f79b02 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_global_parameters_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_global_parameters_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload import urllib.parse @@ -36,10 +36,6 @@ build_list_by_factory_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -83,9 +79,7 @@ def list_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.GlobalParameterListResponse] = kwargs.pop("cls", 
None) error_map = { @@ -139,8 +133,9 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -184,9 +179,7 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.GlobalParameterResource] = kwargs.pop("cls", None) request = build_get_request( @@ -202,8 +195,9 @@ async def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -322,16 +316,14 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.GlobalParameterResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(default, (IO, bytes)): + if isinstance(default, (IOBase, bytes)): _content = default else: _json = 
self._serialize.body(default, "GlobalParameterResource") @@ -352,8 +344,9 @@ async def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -401,9 +394,7 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -419,8 +410,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_nodes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_nodes_operations.py index 28710a207174..33be75a7e6ac 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_nodes_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_nodes_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload from azure.core.exceptions import ( @@ -33,10 +33,6 @@ build_update_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -90,9 +86,7 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.SelfHostedIntegrationRuntimeNode] = kwargs.pop("cls", None) request = build_get_request( @@ -109,8 +103,9 @@ async def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -160,9 +155,7 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) 
request = build_delete_request( @@ -179,8 +172,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -308,16 +302,14 @@ async def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.SelfHostedIntegrationRuntimeNode] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(update_integration_runtime_node_request, (IO, bytes)): + if isinstance(update_integration_runtime_node_request, (IOBase, bytes)): _content = update_integration_runtime_node_request else: _json = self._serialize.body(update_integration_runtime_node_request, "UpdateIntegrationRuntimeNodeRequest") @@ -339,8 +331,9 @@ async def update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -390,9 +383,7 @@ async def get_ip_address( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", 
_params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.IntegrationRuntimeNodeIpAddress] = kwargs.pop("cls", None) request = build_get_ip_address_request( @@ -409,8 +400,9 @@ async def get_ip_address( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py index 82dfa0a52ebc..3f4559d327d1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload from azure.core.exceptions import ( @@ -30,10 +30,6 @@ from ..._vendor import _convert_request from ...operations._integration_runtime_object_metadata_operations import build_get_request, build_refresh_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -71,9 +67,7 @@ async def _refresh_initial( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[Optional[_models.SsisObjectMetadataStatusResponse]] = kwargs.pop("cls", None) request = build_refresh_request( @@ -89,8 +83,9 @@ async def _refresh_initial( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -141,9 +136,7 @@ async def begin_refresh( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) 
cls: ClsType[_models.SsisObjectMetadataStatusResponse] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) @@ -290,16 +283,14 @@ async def get( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.SsisObjectMetadataListResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(get_metadata_request, (IO, bytes)): + if isinstance(get_metadata_request, (IOBase, bytes)): _content = get_metadata_request else: if get_metadata_request is not None: @@ -323,8 +314,9 @@ async def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtimes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtimes_operations.py index e90111760ed5..230432604733 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtimes_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_integration_runtimes_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code 
Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload import urllib.parse @@ -51,10 +51,6 @@ build_upgrade_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -98,9 +94,7 @@ def list_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.IntegrationRuntimeListResponse] = kwargs.pop("cls", None) error_map = { @@ -154,8 +148,9 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -285,16 +280,14 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = 
kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.IntegrationRuntimeResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(integration_runtime, (IO, bytes)): + if isinstance(integration_runtime, (IOBase, bytes)): _content = integration_runtime else: _json = self._serialize.body(integration_runtime, "IntegrationRuntimeResource") @@ -316,8 +309,9 @@ async def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -374,9 +368,7 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[Optional[_models.IntegrationRuntimeResource]] = kwargs.pop("cls", None) request = build_get_request( @@ -393,8 +385,9 @@ async def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -519,16 +512,14 @@ async def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = 
kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.IntegrationRuntimeResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(update_integration_runtime_request, (IO, bytes)): + if isinstance(update_integration_runtime_request, (IOBase, bytes)): _content = update_integration_runtime_request else: _json = self._serialize.body(update_integration_runtime_request, "UpdateIntegrationRuntimeRequest") @@ -549,8 +540,9 @@ async def update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -598,9 +590,7 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -616,8 +606,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -661,9 +652,7 @@ async def get_status( _headers = kwargs.pop("headers", {}) or {} _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.IntegrationRuntimeStatusResponse] = kwargs.pop("cls", None) request = build_get_status_request( @@ -679,8 +668,9 @@ async def get_status( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -730,9 +720,7 @@ async def list_outbound_network_dependencies_endpoints( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse] = kwargs.pop("cls", None) request = build_list_outbound_network_dependencies_endpoints_request( @@ -748,8 +736,9 @@ async def list_outbound_network_dependencies_endpoints( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -800,9 +789,7 @@ async def get_connection_info( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", 
_params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.IntegrationRuntimeConnectionInfo] = kwargs.pop("cls", None) request = build_get_connection_info_request( @@ -818,8 +805,9 @@ async def get_connection_info( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -943,16 +931,14 @@ async def regenerate_auth_key( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.IntegrationRuntimeAuthKeys] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(regenerate_key_parameters, (IO, bytes)): + if isinstance(regenerate_key_parameters, (IOBase, bytes)): _content = regenerate_key_parameters else: _json = self._serialize.body(regenerate_key_parameters, "IntegrationRuntimeRegenerateKeyParameters") @@ -973,8 +959,9 @@ async def regenerate_auth_key( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1022,9 +1009,7 @@ async def 
list_auth_keys( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.IntegrationRuntimeAuthKeys] = kwargs.pop("cls", None) request = build_list_auth_keys_request( @@ -1040,8 +1025,9 @@ async def list_auth_keys( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1075,9 +1061,7 @@ async def _start_initial( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[Optional[_models.IntegrationRuntimeStatusResponse]] = kwargs.pop("cls", None) request = build_start_request( @@ -1093,8 +1077,9 @@ async def _start_initial( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1145,9 +1130,7 @@ async def begin_start( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + 
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.IntegrationRuntimeStatusResponse] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) @@ -1204,9 +1187,7 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_stop_request( @@ -1222,8 +1203,9 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1266,9 +1248,7 @@ async def begin_stop( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) @@ -1340,9 +1320,7 @@ async def sync_credentials( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_sync_credentials_request( @@ -1358,8 +1336,9 @@ async def sync_credentials( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1404,9 +1383,7 @@ async def get_monitoring_data( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.IntegrationRuntimeMonitoringData] = kwargs.pop("cls", None) request = build_get_monitoring_data_request( @@ -1422,8 +1399,9 @@ async def get_monitoring_data( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1471,9 +1449,7 @@ async def upgrade( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", 
self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_upgrade_request( @@ -1489,8 +1465,9 @@ async def upgrade( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1612,16 +1589,14 @@ async def remove_links( # pylint: disable=inconsistent-return-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[None] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(linked_integration_runtime_request, (IO, bytes)): + if isinstance(linked_integration_runtime_request, (IOBase, bytes)): _content = linked_integration_runtime_request else: _json = self._serialize.body(linked_integration_runtime_request, "LinkedIntegrationRuntimeRequest") @@ -1642,8 +1617,9 @@ async def remove_links( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = 
pipeline_response.http_response @@ -1762,16 +1738,14 @@ async def create_linked_integration_runtime( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.IntegrationRuntimeStatusResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(create_linked_integration_runtime_request, (IO, bytes)): + if isinstance(create_linked_integration_runtime_request, (IOBase, bytes)): _content = create_linked_integration_runtime_request else: _json = self._serialize.body( @@ -1794,8 +1768,9 @@ async def create_linked_integration_runtime( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_linked_services_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_linked_services_operations.py index 3b3aacd91b42..b5e35cd731c7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_linked_services_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_linked_services_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload import urllib.parse @@ -36,10 +36,6 @@ build_list_by_factory_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -83,9 +79,7 @@ def list_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.LinkedServiceListResponse] = kwargs.pop("cls", None) error_map = { @@ -139,8 +133,9 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -270,16 +265,14 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", 
_headers.pop("Content-Type", None)) cls: ClsType[_models.LinkedServiceResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(linked_service, (IO, bytes)): + if isinstance(linked_service, (IOBase, bytes)): _content = linked_service else: _json = self._serialize.body(linked_service, "LinkedServiceResource") @@ -301,8 +294,9 @@ async def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -359,9 +353,7 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[Optional[_models.LinkedServiceResource]] = kwargs.pop("cls", None) request = build_get_request( @@ -378,8 +370,9 @@ async def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -429,9 +422,7 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -447,8 +438,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_private_endpoints_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_private_endpoints_operations.py index 459c7bd228f2..32d0bba56589 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_private_endpoints_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_private_endpoints_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload import urllib.parse @@ -36,10 +36,6 @@ build_list_by_factory_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -85,9 +81,7 @@ def list_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ManagedPrivateEndpointListResponse] = kwargs.pop("cls", None) error_map = { @@ -142,8 +136,9 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -283,16 +278,14 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: 
ClsType[_models.ManagedPrivateEndpointResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(managed_private_endpoint, (IO, bytes)): + if isinstance(managed_private_endpoint, (IOBase, bytes)): _content = managed_private_endpoint else: _json = self._serialize.body(managed_private_endpoint, "ManagedPrivateEndpointResource") @@ -315,8 +308,9 @@ async def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -376,9 +370,7 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ManagedPrivateEndpointResource] = kwargs.pop("cls", None) request = build_get_request( @@ -396,8 +388,9 @@ async def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -452,9 +445,7 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -471,8 +462,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_virtual_networks_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_virtual_networks_operations.py index b71352db5578..2b4a3939a7f7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_virtual_networks_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_managed_virtual_networks_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload import urllib.parse @@ -35,10 +35,6 @@ build_list_by_factory_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -82,9 +78,7 @@ def list_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ManagedVirtualNetworkListResponse] = kwargs.pop("cls", None) error_map = { @@ -138,8 +132,9 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -270,16 +265,14 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: 
ClsType[_models.ManagedVirtualNetworkResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(managed_virtual_network, (IO, bytes)): + if isinstance(managed_virtual_network, (IOBase, bytes)): _content = managed_virtual_network else: _json = self._serialize.body(managed_virtual_network, "ManagedVirtualNetworkResource") @@ -301,8 +294,9 @@ async def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -359,9 +353,7 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ManagedVirtualNetworkResource] = kwargs.pop("cls", None) request = build_get_request( @@ -378,8 +370,9 @@ async def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_operations.py index 922f278d897a..e4f6d05ea917 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_operations.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_operations.py @@ -6,7 +6,6 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar import urllib.parse @@ -30,10 +29,6 @@ from ..._vendor import _convert_request from ...operations._operations import build_list_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -69,9 +64,7 @@ def list(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.OperationListResponse] = kwargs.pop("cls", None) error_map = { @@ -122,8 +115,9 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipeline_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipeline_runs_operations.py index 
bbebe8d3c2d5..374007a9dd00 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipeline_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipeline_runs_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload from azure.core.exceptions import ( @@ -32,10 +32,6 @@ build_query_by_factory_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -149,16 +145,14 @@ async def query_by_factory( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.PipelineRunsQueryResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(filter_parameters, (IO, bytes)): + if isinstance(filter_parameters, (IOBase, bytes)): _content = filter_parameters else: _json = self._serialize.body(filter_parameters, "RunFilterParameters") @@ -178,8 +172,9 @@ async def query_by_factory( request = _convert_request(request) request.url = 
self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -225,9 +220,7 @@ async def get(self, resource_group_name: str, factory_name: str, run_id: str, ** _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.PipelineRun] = kwargs.pop("cls", None) request = build_get_request( @@ -243,8 +236,9 @@ async def get(self, resource_group_name: str, factory_name: str, run_id: str, ** request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -300,9 +294,7 @@ async def cancel( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_cancel_request( @@ -319,8 +311,9 @@ async def cancel( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: 
disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipelines_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipelines_operations.py index 54d7af0bf362..c0cee25442ba 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipelines_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_pipelines_operations.py @@ -6,6 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from io import IOBase import sys from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload import urllib.parse @@ -41,10 +42,6 @@ from collections.abc import MutableMapping else: from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -88,9 +85,7 @@ def list_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.PipelineListResponse] = kwargs.pop("cls", 
None) error_map = { @@ -144,8 +139,9 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -272,16 +268,14 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.PipelineResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(pipeline, (IO, bytes)): + if isinstance(pipeline, (IOBase, bytes)): _content = pipeline else: _json = self._serialize.body(pipeline, "PipelineResource") @@ -303,8 +297,9 @@ async def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -361,9 +356,7 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[Optional[_models.PipelineResource]] 
= kwargs.pop("cls", None) request = build_get_request( @@ -380,8 +373,9 @@ async def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -431,9 +425,7 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -449,8 +441,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -621,16 +614,14 @@ async def create_run( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.CreateRunResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, 
bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: if parameters is not None: @@ -658,8 +649,9 @@ async def create_run( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_end_point_connections_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_end_point_connections_operations.py index da2bcba32bdc..74168c2a4f78 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_end_point_connections_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_end_point_connections_operations.py @@ -6,7 +6,6 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar import urllib.parse @@ -30,10 +29,6 @@ from ..._vendor import _convert_request from ...operations._private_end_point_connections_operations import build_list_by_factory_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -77,9 +72,7 @@ def list_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.PrivateEndpointConnectionListResponse] = kwargs.pop("cls", None) error_map = { @@ -133,8 +126,9 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_endpoint_connection_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_endpoint_connection_operations.py index 032a1be7c99b..0e66482910b0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_endpoint_connection_operations.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_endpoint_connection_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload from azure.core.exceptions import ( @@ -32,10 +32,6 @@ build_get_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -175,16 +171,14 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.PrivateEndpointConnectionResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(private_endpoint_wrapper, (IO, bytes)): + if isinstance(private_endpoint_wrapper, (IOBase, bytes)): _content = private_endpoint_wrapper else: _json = self._serialize.body(private_endpoint_wrapper, "PrivateLinkConnectionApprovalRequestResource") @@ -206,8 +200,9 @@ async def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = 
await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -264,9 +259,7 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.PrivateEndpointConnectionResource] = kwargs.pop("cls", None) request = build_get_request( @@ -283,8 +276,9 @@ async def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -332,9 +326,7 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -350,8 +342,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_link_resources_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_link_resources_operations.py index 11aeb413aba4..9641e0f3d311 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_link_resources_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_private_link_resources_operations.py @@ -6,7 +6,6 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys from typing import Any, Callable, Dict, Optional, TypeVar from azure.core.exceptions import ( @@ -28,10 +27,6 @@ from ..._vendor import _convert_request from ...operations._private_link_resources_operations import build_get_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -81,9 +76,7 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.PrivateLinkResourcesWrapper] = kwargs.pop("cls", None) request = build_get_request( @@ -98,8 +91,9 @@ async def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await 
self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_trigger_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_trigger_runs_operations.py index e8da64448396..75518d070ec1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_trigger_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_trigger_runs_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload from azure.core.exceptions import ( @@ -32,10 +32,6 @@ build_rerun_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -89,9 +85,7 @@ async def rerun( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_rerun_request( @@ -108,8 +102,9 @@ async def rerun( # pylint: 
disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -155,9 +150,7 @@ async def cancel( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_cancel_request( @@ -174,8 +167,9 @@ async def cancel( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -281,16 +275,14 @@ async def query_by_factory( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.TriggerRunsQueryResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(filter_parameters, (IO, bytes)): + if 
isinstance(filter_parameters, (IOBase, bytes)): _content = filter_parameters else: _json = self._serialize.body(filter_parameters, "RunFilterParameters") @@ -310,8 +302,9 @@ async def query_by_factory( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_triggers_operations.py index c5c7add922ed..d37c98a086a4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_triggers_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_triggers_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload import urllib.parse @@ -44,10 +44,6 @@ build_unsubscribe_from_events_request, ) -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -89,9 +85,7 @@ def list_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.TriggerListResponse] = kwargs.pop("cls", None) error_map = { @@ -145,8 +139,9 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -252,16 +247,14 @@ async def query_by_factory( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.TriggerQueryResponse] 
= kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(filter_parameters, (IO, bytes)): + if isinstance(filter_parameters, (IOBase, bytes)): _content = filter_parameters else: _json = self._serialize.body(filter_parameters, "TriggerFilterParameters") @@ -281,8 +274,9 @@ async def query_by_factory( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -413,16 +407,14 @@ async def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.TriggerResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(trigger, (IO, bytes)): + if isinstance(trigger, (IOBase, bytes)): _content = trigger else: _json = self._serialize.body(trigger, "TriggerResource") @@ -444,8 +436,9 @@ async def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -502,9 +495,7 @@ async def get( _headers = kwargs.pop("headers", {}) or {} _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[Optional[_models.TriggerResource]] = kwargs.pop("cls", None) request = build_get_request( @@ -521,8 +512,9 @@ async def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -572,9 +564,7 @@ async def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -590,8 +580,9 @@ async def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -621,9 +612,7 @@ async def _subscribe_to_events_initial( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", self._config.api_version)) cls: ClsType[Optional[_models.TriggerSubscriptionOperationStatus]] = kwargs.pop("cls", None) request = build_subscribe_to_events_request( @@ -639,8 +628,9 @@ async def _subscribe_to_events_initial( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -691,9 +681,7 @@ async def begin_subscribe_to_events( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.TriggerSubscriptionOperationStatus] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) @@ -764,9 +752,7 @@ async def get_event_subscription_status( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.TriggerSubscriptionOperationStatus] = kwargs.pop("cls", None) request = build_get_event_subscription_status_request( @@ -782,8 +768,9 @@ async def get_event_subscription_status( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: 
disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -817,9 +804,7 @@ async def _unsubscribe_from_events_initial( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[Optional[_models.TriggerSubscriptionOperationStatus]] = kwargs.pop("cls", None) request = build_unsubscribe_from_events_request( @@ -835,8 +820,9 @@ async def _unsubscribe_from_events_initial( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -887,9 +873,7 @@ async def begin_unsubscribe_from_events( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.TriggerSubscriptionOperationStatus] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) @@ -946,9 +930,7 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", 
self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_start_request( @@ -964,8 +946,9 @@ async def _start_initial( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1008,9 +991,7 @@ async def begin_start( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) @@ -1065,9 +1046,7 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_stop_request( @@ -1083,8 +1062,9 @@ async def _stop_initial( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: 
disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1127,9 +1107,7 @@ async def begin_stop( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 8b7d9ff583ce..702f57a07cff 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -134,6 +134,9 @@ from ._models_py3 import CassandraSource from ._models_py3 import CassandraTableDataset from ._models_py3 import ChainingTrigger +from ._models_py3 import ChangeDataCaptureFolder +from ._models_py3 import ChangeDataCaptureListResponse +from ._models_py3 import ChangeDataCaptureResource from ._models_py3 import CloudError from ._models_py3 import CmdkeySetup from ._models_py3 import CommonDataServiceForAppsEntityDataset @@ -197,6 +200,7 @@ from ._models_py3 import DataFlowSourceSetting from ._models_py3 import DataFlowStagingInfo from ._models_py3 import DataLakeAnalyticsUSQLActivity +from ._models_py3 import DataMapperMapping from ._models_py3 import DatabricksNotebookActivity from ._models_py3 import DatabricksSparkJarActivity from ._models_py3 import DatabricksSparkPythonActivity @@ -351,6 +355,7 @@ from ._models_py3 import 
IntegrationRuntimeCustomSetupScriptProperties from ._models_py3 import IntegrationRuntimeCustomerVirtualNetwork from ._models_py3 import IntegrationRuntimeDataFlowProperties +from ._models_py3 import IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem from ._models_py3 import IntegrationRuntimeDataProxyProperties from ._models_py3 import IntegrationRuntimeDebugResource from ._models_py3 import IntegrationRuntimeListResponse @@ -379,6 +384,13 @@ from ._models_py3 import JsonSink from ._models_py3 import JsonSource from ._models_py3 import JsonWriteSettings +from ._models_py3 import LakeHouseLinkedService +from ._models_py3 import LakeHouseLocation +from ._models_py3 import LakeHouseReadSettings +from ._models_py3 import LakeHouseTableDataset +from ._models_py3 import LakeHouseTableSink +from ._models_py3 import LakeHouseTableSource +from ._models_py3 import LakeHouseWriteSettings from ._models_py3 import LinkedIntegrationRuntime from ._models_py3 import LinkedIntegrationRuntimeKeyAuthorization from ._models_py3 import LinkedIntegrationRuntimeRbacAuthorization @@ -410,6 +422,18 @@ from ._models_py3 import ManagedVirtualNetworkListResponse from ._models_py3 import ManagedVirtualNetworkReference from ._models_py3 import ManagedVirtualNetworkResource +from ._models_py3 import MapperAttributeMapping +from ._models_py3 import MapperAttributeMappings +from ._models_py3 import MapperAttributeReference +from ._models_py3 import MapperConnection +from ._models_py3 import MapperConnectionReference +from ._models_py3 import MapperDslConnectorProperties +from ._models_py3 import MapperPolicy +from ._models_py3 import MapperPolicyRecurrence +from ._models_py3 import MapperSourceConnectionsInfo +from ._models_py3 import MapperTable +from ._models_py3 import MapperTableSchema +from ._models_py3 import MapperTargetConnectionsInfo from ._models_py3 import MappingDataFlow from ._models_py3 import MariaDBLinkedService from ._models_py3 import MariaDBSource @@ -477,10 +501,12 @@ 
from ._models_py3 import OrcSink from ._models_py3 import OrcSource from ._models_py3 import OrcWriteSettings +from ._models_py3 import OutputColumn from ._models_py3 import PackageStore from ._models_py3 import ParameterSpecification from ._models_py3 import ParquetDataset from ._models_py3 import ParquetFormat +from ._models_py3 import ParquetReadSettings from ._models_py3 import ParquetSink from ._models_py3 import ParquetSource from ._models_py3 import ParquetWriteSettings @@ -595,6 +621,7 @@ from ._models_py3 import ScriptActivityScriptBlock from ._models_py3 import ScriptActivityTypePropertiesLogSettings from ._models_py3 import SecretBase +from ._models_py3 import SecureInputOutputPolicy from ._models_py3 import SecureString from ._models_py3 import SelfDependencyTumblingWindowTriggerReference from ._models_py3 import SelfHostedIntegrationRuntime @@ -730,6 +757,8 @@ from ._models_py3 import ZohoObjectDataset from ._models_py3 import ZohoSource +from ._data_factory_management_client_enums import ActivityOnInactiveMarkAs +from ._data_factory_management_client_enums import ActivityState from ._data_factory_management_client_enums import AmazonRdsForOraclePartitionOption from ._data_factory_management_client_enums import AvroCompressionCodec from ._data_factory_management_client_enums import AzureFunctionActivityMethod @@ -740,9 +769,9 @@ from ._data_factory_management_client_enums import CassandraSourceReadConsistencyLevels from ._data_factory_management_client_enums import CompressionCodec from ._data_factory_management_client_enums import ConfigurationType +from ._data_factory_management_client_enums import ConnectionType from ._data_factory_management_client_enums import CopyBehaviorType from ._data_factory_management_client_enums import CosmosDbConnectionMode -from ._data_factory_management_client_enums import CosmosDbServicePrincipalCredentialType from ._data_factory_management_client_enums import CredentialReferenceType from 
._data_factory_management_client_enums import DataFlowComputeType from ._data_factory_management_client_enums import DataFlowDebugCommandType @@ -759,6 +788,7 @@ from ._data_factory_management_client_enums import EventSubscriptionStatus from ._data_factory_management_client_enums import ExpressionType from ._data_factory_management_client_enums import FactoryIdentityType +from ._data_factory_management_client_enums import FrequencyType from ._data_factory_management_client_enums import FtpAuthenticationType from ._data_factory_management_client_enums import GlobalParameterType from ._data_factory_management_client_enums import GoogleAdWordsAuthenticationType @@ -786,6 +816,7 @@ from ._data_factory_management_client_enums import JsonWriteFilePattern from ._data_factory_management_client_enums import ManagedIntegrationRuntimeNodeStatus from ._data_factory_management_client_enums import ManagedVirtualNetworkReferenceType +from ._data_factory_management_client_enums import MappingType from ._data_factory_management_client_enums import MongoDbAuthenticationType from ._data_factory_management_client_enums import NetezzaPartitionOption from ._data_factory_management_client_enums import NotebookParameterType @@ -980,6 +1011,9 @@ "CassandraSource", "CassandraTableDataset", "ChainingTrigger", + "ChangeDataCaptureFolder", + "ChangeDataCaptureListResponse", + "ChangeDataCaptureResource", "CloudError", "CmdkeySetup", "CommonDataServiceForAppsEntityDataset", @@ -1043,6 +1077,7 @@ "DataFlowSourceSetting", "DataFlowStagingInfo", "DataLakeAnalyticsUSQLActivity", + "DataMapperMapping", "DatabricksNotebookActivity", "DatabricksSparkJarActivity", "DatabricksSparkPythonActivity", @@ -1197,6 +1232,7 @@ "IntegrationRuntimeCustomSetupScriptProperties", "IntegrationRuntimeCustomerVirtualNetwork", "IntegrationRuntimeDataFlowProperties", + "IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem", "IntegrationRuntimeDataProxyProperties", "IntegrationRuntimeDebugResource", 
"IntegrationRuntimeListResponse", @@ -1225,6 +1261,13 @@ "JsonSink", "JsonSource", "JsonWriteSettings", + "LakeHouseLinkedService", + "LakeHouseLocation", + "LakeHouseReadSettings", + "LakeHouseTableDataset", + "LakeHouseTableSink", + "LakeHouseTableSource", + "LakeHouseWriteSettings", "LinkedIntegrationRuntime", "LinkedIntegrationRuntimeKeyAuthorization", "LinkedIntegrationRuntimeRbacAuthorization", @@ -1256,6 +1299,18 @@ "ManagedVirtualNetworkListResponse", "ManagedVirtualNetworkReference", "ManagedVirtualNetworkResource", + "MapperAttributeMapping", + "MapperAttributeMappings", + "MapperAttributeReference", + "MapperConnection", + "MapperConnectionReference", + "MapperDslConnectorProperties", + "MapperPolicy", + "MapperPolicyRecurrence", + "MapperSourceConnectionsInfo", + "MapperTable", + "MapperTableSchema", + "MapperTargetConnectionsInfo", "MappingDataFlow", "MariaDBLinkedService", "MariaDBSource", @@ -1323,10 +1378,12 @@ "OrcSink", "OrcSource", "OrcWriteSettings", + "OutputColumn", "PackageStore", "ParameterSpecification", "ParquetDataset", "ParquetFormat", + "ParquetReadSettings", "ParquetSink", "ParquetSource", "ParquetWriteSettings", @@ -1441,6 +1498,7 @@ "ScriptActivityScriptBlock", "ScriptActivityTypePropertiesLogSettings", "SecretBase", + "SecureInputOutputPolicy", "SecureString", "SelfDependencyTumblingWindowTriggerReference", "SelfHostedIntegrationRuntime", @@ -1575,6 +1633,8 @@ "ZohoLinkedService", "ZohoObjectDataset", "ZohoSource", + "ActivityOnInactiveMarkAs", + "ActivityState", "AmazonRdsForOraclePartitionOption", "AvroCompressionCodec", "AzureFunctionActivityMethod", @@ -1585,9 +1645,9 @@ "CassandraSourceReadConsistencyLevels", "CompressionCodec", "ConfigurationType", + "ConnectionType", "CopyBehaviorType", "CosmosDbConnectionMode", - "CosmosDbServicePrincipalCredentialType", "CredentialReferenceType", "DataFlowComputeType", "DataFlowDebugCommandType", @@ -1604,6 +1664,7 @@ "EventSubscriptionStatus", "ExpressionType", "FactoryIdentityType", + 
"FrequencyType", "FtpAuthenticationType", "GlobalParameterType", "GoogleAdWordsAuthenticationType", @@ -1631,6 +1692,7 @@ "JsonWriteFilePattern", "ManagedIntegrationRuntimeNodeStatus", "ManagedVirtualNetworkReferenceType", + "MappingType", "MongoDbAuthenticationType", "NetezzaPartitionOption", "NotebookParameterType", diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py index a2b04c92ab26..b0ad5505dd4a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py @@ -10,6 +10,25 @@ from azure.core import CaseInsensitiveEnumMeta +class ActivityOnInactiveMarkAs(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Status result of the activity when the state is set to Inactive. This is an optional property + and if not provided when the activity is inactive, the status will be Succeeded by default. + """ + + SUCCEEDED = "Succeeded" + FAILED = "Failed" + SKIPPED = "Skipped" + + +class ActivityState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Activity state. This is an optional property and if not provided, the state will be Active by + default. 
+ """ + + ACTIVE = "Active" + INACTIVE = "Inactive" + + class AmazonRdsForOraclePartitionOption(str, Enum, metaclass=CaseInsensitiveEnumMeta): """AmazonRdsForOraclePartitionOption.""" @@ -112,6 +131,12 @@ class ConfigurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): ARTIFACT = "Artifact" +class ConnectionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of connection via linked service or dataset.""" + + LINKEDSERVICETYPE = "linkedservicetype" + + class CopyBehaviorType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """All available types of copy behavior.""" @@ -121,24 +146,12 @@ class CopyBehaviorType(str, Enum, metaclass=CaseInsensitiveEnumMeta): class CosmosDbConnectionMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The connection mode used to access CosmosDB account. Type: string (or Expression with - resultType string). - """ + """The connection mode used to access CosmosDB account. Type: string.""" GATEWAY = "Gateway" DIRECT = "Direct" -class CosmosDbServicePrincipalCredentialType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The service principal credential type to use in Server-To-Server authentication. - 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or - Expression with resultType string). 
- """ - - SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" - SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" - - class CredentialReferenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Credential reference type.""" @@ -266,6 +279,14 @@ class FactoryIdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta): SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned" +class FrequencyType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Frequency of period in terms of 'Hour', 'Minute' or 'Second'.""" + + HOUR = "Hour" + MINUTE = "Minute" + SECOND = "Second" + + class FtpAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The authentication type to be used to connect to the FTP server.""" @@ -489,6 +510,14 @@ class ManagedVirtualNetworkReferenceType(str, Enum, metaclass=CaseInsensitiveEnu MANAGED_VIRTUAL_NETWORK_REFERENCE = "ManagedVirtualNetworkReference" +class MappingType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the CDC attribute mapping. Note: 'Advanced' mapping type is also saved as 'Derived'.""" + + DIRECT = "Direct" + DERIVED = "Derived" + AGGREGATE = "Aggregate" + + class MongoDbAuthenticationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The authentication type to be used to connect to the MongoDB database.""" @@ -650,6 +679,7 @@ class RunQueryFilterOperator(str, Enum, metaclass=CaseInsensitiveEnumMeta): NOT_EQUALS = "NotEquals" IN = "In" NOT_IN = "NotIn" + IN_ENUM = "In" class RunQueryOrder(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -685,7 +715,7 @@ class SalesforceSinkWriteBehavior(str, Enum, metaclass=CaseInsensitiveEnumMeta): class SalesforceSourceReadBehavior(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The read behavior for the operation. 
Default is Query.""" + """The Salesforce read behavior for the operation.""" QUERY = "Query" QUERY_ALL = "QueryAll" @@ -835,9 +865,7 @@ class SparkThriftTransportProtocol(str, Enum, metaclass=CaseInsensitiveEnumMeta) class SqlAlwaysEncryptedAkvAuthType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Sql always encrypted AKV authentication type. Type: string (or Expression with resultType - string). - """ + """Sql always encrypted AKV authentication type. Type: string.""" SERVICE_PRINCIPAL = "ServicePrincipal" MANAGED_IDENTITY = "ManagedIdentity" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index 2852b80e7c57..a053c22f90c3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -80,6 +80,13 @@ class Activity(_serialization.Model): :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -96,6 +103,8 @@ class Activity(_serialization.Model): "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, } @@ -114,6 +123,8 @@ def __init__( name: str, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, **kwargs: Any @@ -126,6 +137,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. 
@@ -136,6 +154,8 @@ def __init__( self.name = name self.type: Optional[str] = None self.description = description + self.state = state + self.on_inactive_mark_as = on_inactive_mark_as self.depends_on = depends_on self.user_properties = user_properties @@ -466,13 +486,14 @@ class LinkedService(_serialization.Model): GreenplumLinkedService, HBaseLinkedService, HDInsightLinkedService, HDInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, - MagentoLinkedService, MariaDBLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, - MongoDbLinkedService, MongoDbAtlasLinkedService, MongoDbV2LinkedService, MySqlLinkedService, - NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, - OracleLinkedService, OracleCloudStorageLinkedService, OracleServiceCloudLinkedService, - PaypalLinkedService, PhoenixLinkedService, PostgreSqlLinkedService, PrestoLinkedService, - QuickBooksLinkedService, QuickbaseLinkedService, ResponsysLinkedService, - RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, + LakeHouseLinkedService, MagentoLinkedService, MariaDBLinkedService, MarketoLinkedService, + MicrosoftAccessLinkedService, MongoDbLinkedService, MongoDbAtlasLinkedService, + MongoDbV2LinkedService, MySqlLinkedService, NetezzaLinkedService, ODataLinkedService, + OdbcLinkedService, Office365LinkedService, OracleLinkedService, + OracleCloudStorageLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, + PhoenixLinkedService, PostgreSqlLinkedService, PrestoLinkedService, QuickBooksLinkedService, + QuickbaseLinkedService, ResponsysLinkedService, RestServiceLinkedService, + SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBWLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOdpLinkedService, 
SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, @@ -576,6 +597,7 @@ class LinkedService(_serialization.Model): "Impala": "ImpalaLinkedService", "Informix": "InformixLinkedService", "Jira": "JiraLinkedService", + "LakeHouse": "LakeHouseLinkedService", "Magento": "MagentoLinkedService", "MariaDB": "MariaDBLinkedService", "Marketo": "MarketoLinkedService", @@ -705,9 +727,8 @@ class AmazonMWSLinkedService(LinkedService): # pylint: disable=too-many-instanc connecting over SSL. The default value is true. :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -734,7 +755,7 @@ class AmazonMWSLinkedService(LinkedService): # pylint: disable=too-many-instanc "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -754,7 +775,7 @@ def __init__( use_encrypted_endpoints: Optional[JSON] = None, use_host_verification: Optional[JSON] = None, use_peer_verification: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -795,9 +816,8 @@ def __init__( connecting over SSL. The default value is true. 
:paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -838,19 +858,20 @@ class Dataset(_serialization.Model): FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, - JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, - MicrosoftAccessTableDataset, MongoDbAtlasCollectionDataset, MongoDbCollectionDataset, - MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, - OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, - OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, - PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, - RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, - SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, - SapEccResourceDataset, SapHanaTableDataset, SapOdpResourceDataset, SapOpenHubTableDataset, - SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, - ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SqlServerTableDataset, - SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, - WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset + JsonDataset, LakeHouseTableDataset, MagentoObjectDataset, MariaDBTableDataset, + 
MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbAtlasCollectionDataset, + MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, + ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, + OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, + PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, + ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, + SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, + SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, + SapOdpResourceDataset, SapOpenHubTableDataset, SapTableResourceDataset, + ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, + SnowflakeDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, + SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, + XeroObjectDataset, XmlDataset, ZohoObjectDataset All required parameters must be populated in order to send to Azure. 
@@ -945,6 +966,7 @@ class Dataset(_serialization.Model): "InformixTable": "InformixTableDataset", "JiraObject": "JiraObjectDataset", "Json": "JsonDataset", + "LakeHouseTable": "LakeHouseTableDataset", "MagentoObject": "MagentoObjectDataset", "MariaDBTable": "MariaDBTableDataset", "MarketoObject": "MarketoObjectDataset", @@ -1153,10 +1175,11 @@ class CopySource(_serialization.Model): AzureDataLakeStoreSource, AzureDatabricksDeltaLakeSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDbMongoDbApiSource, CosmosDbSqlApiSource, DelimitedTextSource, DocumentDbCollectionSource, DynamicsCrmSource, DynamicsSource, - ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, - MongoDbAtlasSource, MongoDbSource, MongoDbV2Source, ODataSource, Office365Source, OracleSource, - OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, - SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource + ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, LakeHouseTableSource, + MicrosoftAccessSource, MongoDbAtlasSource, MongoDbSource, MongoDbV2Source, ODataSource, + Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, + SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, + WebSource, XmlSource All required parameters must be populated in order to send to Azure. @@ -1214,6 +1237,7 @@ class CopySource(_serialization.Model): "HdfsSource": "HdfsSource", "HttpSource": "HttpSource", "JsonSource": "JsonSource", + "LakeHouseTableSource": "LakeHouseTableSource", "MicrosoftAccessSource": "MicrosoftAccessSource", "MongoDbAtlasSource": "MongoDbAtlasSource", "MongoDbSource": "MongoDbSource", @@ -1566,9 +1590,8 @@ class AmazonRdsForOracleLinkedService(LinkedService): :ivar password: The Azure key vault secret reference of password in connection string. 
:vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -1585,7 +1608,7 @@ class AmazonRdsForOracleLinkedService(LinkedService): "annotations": {"key": "annotations", "type": "[object]"}, "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, "password": {"key": "typeProperties.password", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -1598,7 +1621,7 @@ def __init__( parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -1619,9 +1642,8 @@ def __init__( :keyword password: The Azure key vault secret reference of password in connection string. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. 
+ :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -1954,9 +1976,8 @@ class AmazonRdsForSqlServerLinkedService(LinkedService): # pylint: disable=too- :ivar password: The on-premises Windows authentication password. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str :ivar always_encrypted_settings: Sql always encrypted properties. :vartype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ @@ -1976,7 +1997,7 @@ class AmazonRdsForSqlServerLinkedService(LinkedService): # pylint: disable=too- "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, "user_name": {"key": "typeProperties.userName", "type": "object"}, "password": {"key": "typeProperties.password", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, "always_encrypted_settings": { "key": "typeProperties.alwaysEncryptedSettings", "type": "SqlAlwaysEncryptedProperties", @@ -1994,7 +2015,7 @@ def __init__( annotations: Optional[List[JSON]] = None, user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, always_encrypted_settings: Optional["_models.SqlAlwaysEncryptedProperties"] = None, **kwargs: Any ) -> None: @@ -2019,9 +2040,8 @@ def __init__( :keyword password: The on-premises Windows authentication password. 
:paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str :keyword always_encrypted_settings: Sql always encrypted properties. :paramtype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties @@ -2079,6 +2099,10 @@ class AmazonRdsForSqlServerSource(TabularSource): # pylint: disable=too-many-in :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :vartype stored_procedure_parameters: JSON + :ivar isolation_level: Specifies the transaction locking behavior for the SQL source. Allowed + values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value + is ReadCommitted. Type: string (or Expression with resultType string). + :vartype isolation_level: JSON :ivar produce_additional_types: Which additional types to produce. :vartype produce_additional_types: JSON :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. 
@@ -2104,6 +2128,7 @@ class AmazonRdsForSqlServerSource(TabularSource): # pylint: disable=too-many-in "sql_reader_query": {"key": "sqlReaderQuery", "type": "object"}, "sql_reader_stored_procedure_name": {"key": "sqlReaderStoredProcedureName", "type": "object"}, "stored_procedure_parameters": {"key": "storedProcedureParameters", "type": "object"}, + "isolation_level": {"key": "isolationLevel", "type": "object"}, "produce_additional_types": {"key": "produceAdditionalTypes", "type": "object"}, "partition_option": {"key": "partitionOption", "type": "object"}, "partition_settings": {"key": "partitionSettings", "type": "SqlPartitionSettings"}, @@ -2122,6 +2147,7 @@ def __init__( sql_reader_query: Optional[JSON] = None, sql_reader_stored_procedure_name: Optional[JSON] = None, stored_procedure_parameters: Optional[JSON] = None, + isolation_level: Optional[JSON] = None, produce_additional_types: Optional[JSON] = None, partition_option: Optional[JSON] = None, partition_settings: Optional["_models.SqlPartitionSettings"] = None, @@ -2159,6 +2185,10 @@ def __init__( :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :paramtype stored_procedure_parameters: JSON + :keyword isolation_level: Specifies the transaction locking behavior for the SQL source. + Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default + value is ReadCommitted. Type: string (or Expression with resultType string). + :paramtype isolation_level: JSON :keyword produce_additional_types: Which additional types to produce. :paramtype produce_additional_types: JSON :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. 
@@ -2181,6 +2211,7 @@ def __init__( self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters + self.isolation_level = isolation_level self.produce_additional_types = produce_additional_types self.partition_option = partition_option self.partition_settings = partition_settings @@ -2332,9 +2363,8 @@ class AmazonRedshiftLinkedService(LinkedService): # pylint: disable=too-many-in connections. The default value is 5439. Type: integer (or Expression with resultType integer). :vartype port: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -2355,7 +2385,7 @@ class AmazonRedshiftLinkedService(LinkedService): # pylint: disable=too-many-in "password": {"key": "typeProperties.password", "type": "SecretBase"}, "database": {"key": "typeProperties.database", "type": "object"}, "port": {"key": "typeProperties.port", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -2371,7 +2401,7 @@ def __init__( username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, port: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -2401,9 +2431,8 @@ def __init__( connections. The default value is 5439. Type: integer (or Expression with resultType integer). :paramtype port: JSON :keyword encrypted_credential: The encrypted credential used for authentication. 
Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -2688,9 +2717,8 @@ class AmazonS3CompatibleLinkedService(LinkedService): # pylint: disable=too-man access. Default value is false. Type: boolean (or Expression with resultType boolean). :vartype force_path_style: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -2708,7 +2736,7 @@ class AmazonS3CompatibleLinkedService(LinkedService): # pylint: disable=too-man "secret_access_key": {"key": "typeProperties.secretAccessKey", "type": "SecretBase"}, "service_url": {"key": "typeProperties.serviceUrl", "type": "object"}, "force_path_style": {"key": "typeProperties.forcePathStyle", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -2723,7 +2751,7 @@ def __init__( secret_access_key: Optional["_models.SecretBase"] = None, service_url: Optional[JSON] = None, force_path_style: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -2753,9 +2781,8 @@ def __init__( access. Default value is false. Type: boolean (or Expression with resultType boolean). 
:paramtype force_path_style: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -2779,8 +2806,8 @@ class DatasetLocation(_serialization.Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: AmazonS3CompatibleLocation, AmazonS3Location, AzureBlobFSLocation, AzureBlobStorageLocation, AzureDataLakeStoreLocation, AzureFileStorageLocation, FileServerLocation, FtpServerLocation, - GoogleCloudStorageLocation, HdfsLocation, HttpServerLocation, OracleCloudStorageLocation, - SftpLocation + GoogleCloudStorageLocation, HdfsLocation, HttpServerLocation, LakeHouseLocation, + OracleCloudStorageLocation, SftpLocation All required parameters must be populated in order to send to Azure. 
@@ -2821,6 +2848,7 @@ class DatasetLocation(_serialization.Model): "GoogleCloudStorageLocation": "GoogleCloudStorageLocation", "HdfsLocation": "HdfsLocation", "HttpServerLocation": "HttpServerLocation", + "LakeHouseLocation": "LakeHouseLocation", "OracleCloudStorageLocation": "OracleCloudStorageLocation", "SftpLocation": "SftpLocation", } @@ -2931,7 +2959,7 @@ class StoreReadSettings(_serialization.Model): AmazonS3CompatibleReadSettings, AmazonS3ReadSettings, AzureBlobFSReadSettings, AzureBlobStorageReadSettings, AzureDataLakeStoreReadSettings, AzureFileStorageReadSettings, FileServerReadSettings, FtpReadSettings, GoogleCloudStorageReadSettings, HdfsReadSettings, - HttpReadSettings, OracleCloudStorageReadSettings, SftpReadSettings + HttpReadSettings, LakeHouseReadSettings, OracleCloudStorageReadSettings, SftpReadSettings All required parameters must be populated in order to send to Azure. @@ -2972,6 +3000,7 @@ class StoreReadSettings(_serialization.Model): "GoogleCloudStorageReadSettings": "GoogleCloudStorageReadSettings", "HdfsReadSettings": "HdfsReadSettings", "HttpReadSettings": "HttpReadSettings", + "LakeHouseReadSettings": "LakeHouseReadSettings", "OracleCloudStorageReadSettings": "OracleCloudStorageReadSettings", "SftpReadSettings": "SftpReadSettings", } @@ -3035,8 +3064,9 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings): # pylint: disable=too- configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). :vartype file_list_path: JSON - :ivar enable_partition_discovery: Indicates whether to enable partition discovery. - :vartype enable_partition_discovery: bool + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). + :vartype enable_partition_discovery: JSON :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). 
:vartype partition_root_path: JSON @@ -3065,7 +3095,7 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings): # pylint: disable=too- "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, "prefix": {"key": "prefix", "type": "object"}, "file_list_path": {"key": "fileListPath", "type": "object"}, - "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "object"}, "partition_root_path": {"key": "partitionRootPath", "type": "object"}, "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, "modified_datetime_start": {"key": "modifiedDatetimeStart", "type": "object"}, @@ -3083,7 +3113,7 @@ def __init__( wildcard_file_name: Optional[JSON] = None, prefix: Optional[JSON] = None, file_list_path: Optional[JSON] = None, - enable_partition_discovery: Optional[bool] = None, + enable_partition_discovery: Optional[JSON] = None, partition_root_path: Optional[JSON] = None, delete_files_after_completion: Optional[JSON] = None, modified_datetime_start: Optional[JSON] = None, @@ -3116,8 +3146,9 @@ def __init__( configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). :paramtype file_list_path: JSON - :keyword enable_partition_discovery: Indicates whether to enable partition discovery. - :paramtype enable_partition_discovery: bool + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). + :paramtype enable_partition_discovery: JSON :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :paramtype partition_root_path: JSON @@ -3348,9 +3379,8 @@ class AmazonS3LinkedService(LinkedService): # pylint: disable=too-many-instance :ivar session_token: The session token for the S3 temporary security credential. 
:vartype session_token: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -3369,7 +3399,7 @@ class AmazonS3LinkedService(LinkedService): # pylint: disable=too-many-instance "secret_access_key": {"key": "typeProperties.secretAccessKey", "type": "SecretBase"}, "service_url": {"key": "typeProperties.serviceUrl", "type": "object"}, "session_token": {"key": "typeProperties.sessionToken", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -3385,7 +3415,7 @@ def __init__( secret_access_key: Optional["_models.SecretBase"] = None, service_url: Optional[JSON] = None, session_token: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -3416,9 +3446,8 @@ def __init__( :keyword session_token: The session token for the S3 temporary security credential. :paramtype session_token: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. 
+ :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -3541,8 +3570,9 @@ class AmazonS3ReadSettings(StoreReadSettings): # pylint: disable=too-many-insta configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). :vartype file_list_path: JSON - :ivar enable_partition_discovery: Indicates whether to enable partition discovery. - :vartype enable_partition_discovery: bool + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). + :vartype enable_partition_discovery: JSON :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :vartype partition_root_path: JSON @@ -3571,7 +3601,7 @@ class AmazonS3ReadSettings(StoreReadSettings): # pylint: disable=too-many-insta "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, "prefix": {"key": "prefix", "type": "object"}, "file_list_path": {"key": "fileListPath", "type": "object"}, - "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "object"}, "partition_root_path": {"key": "partitionRootPath", "type": "object"}, "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, "modified_datetime_start": {"key": "modifiedDatetimeStart", "type": "object"}, @@ -3589,7 +3619,7 @@ def __init__( wildcard_file_name: Optional[JSON] = None, prefix: Optional[JSON] = None, file_list_path: Optional[JSON] = None, - enable_partition_discovery: Optional[bool] = None, + enable_partition_discovery: Optional[JSON] = None, partition_root_path: Optional[JSON] = None, delete_files_after_completion: Optional[JSON] = None, modified_datetime_start: Optional[JSON] = None, @@ -3622,8 +3652,9 @@ def __init__( configured in the dataset) that you 
want to copy. Type: string (or Expression with resultType string). :paramtype file_list_path: JSON - :keyword enable_partition_discovery: Indicates whether to enable partition discovery. - :paramtype enable_partition_discovery: bool + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). + :paramtype enable_partition_discovery: JSON :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :paramtype partition_root_path: JSON @@ -3675,6 +3706,13 @@ class ControlActivity(Activity): :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -3691,6 +3729,8 @@ class ControlActivity(Activity): "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, } @@ -3718,6 +3758,8 @@ def __init__( name: str, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, **kwargs: Any @@ -3730,6 +3772,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. 
@@ -3739,6 +3788,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, **kwargs @@ -3760,13 +3811,21 @@ class AppendVariableActivity(ControlActivity): :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :ivar variable_name: Name of the variable whose value needs to be appended to. :vartype variable_name: str - :ivar value: Value to be appended. Could be a static value or Expression. + :ivar value: Value to be appended. Type: could be a static value matching type of the variable + item or Expression with resultType matching type of the variable item. 
:vartype value: JSON """ @@ -3780,6 +3839,8 @@ class AppendVariableActivity(ControlActivity): "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "variable_name": {"key": "typeProperties.variableName", "type": "str"}, @@ -3792,6 +3853,8 @@ def __init__( name: str, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, variable_name: Optional[str] = None, @@ -3806,19 +3869,29 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. 
:paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :keyword variable_name: Name of the variable whose value needs to be appended to. :paramtype variable_name: str - :keyword value: Value to be appended. Could be a static value or Expression. + :keyword value: Value to be appended. Type: could be a static value matching type of the + variable item or Expression with resultType matching type of the variable item. :paramtype value: JSON """ super().__init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, **kwargs @@ -3846,7 +3919,8 @@ class AppFiguresLinkedService(LinkedService): :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. :vartype annotations: list[JSON] - :ivar user_name: The username of the Appfigures source. Required. + :ivar user_name: The username of the Appfigures source. Type: string (or Expression with + resultType string). Required. :vartype user_name: JSON :ivar password: The password of the AppFigures source. Required. :vartype password: ~azure.mgmt.datafactory.models.SecretBase @@ -3898,7 +3972,8 @@ def __init__( :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. :paramtype annotations: list[JSON] - :keyword user_name: The username of the Appfigures source. Required. + :keyword user_name: The username of the Appfigures source. Type: string (or Expression with + resultType string). Required. :paramtype user_name: JSON :keyword password: The password of the AppFigures source. Required. 
:paramtype password: ~azure.mgmt.datafactory.models.SecretBase @@ -3963,9 +4038,8 @@ class AsanaLinkedService(LinkedService): :ivar api_token: The api token for the Asana source. Required. :vartype api_token: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -3981,7 +4055,7 @@ class AsanaLinkedService(LinkedService): "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, "annotations": {"key": "annotations", "type": "[object]"}, "api_token": {"key": "typeProperties.apiToken", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -3993,7 +4067,7 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -4011,9 +4085,8 @@ def __init__( :keyword api_token: The api token for the Asana source. Required. :paramtype api_token: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. 
+ :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -4270,9 +4343,9 @@ class CopySink(_serialization.Model): AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, - JsonSink, MicrosoftAccessSink, MongoDbAtlasSink, MongoDbV2Sink, OdbcSink, OracleSink, OrcSink, - ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, - SnowflakeSink, SqlDWSink, SqlMISink, SqlServerSink, SqlSink + JsonSink, LakeHouseTableSink, MicrosoftAccessSink, MongoDbAtlasSink, MongoDbV2Sink, OdbcSink, + OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, + SapCloudForCustomerSink, SnowflakeSink, SqlDWSink, SqlMISink, SqlServerSink, SqlSink All required parameters must be populated in order to send to Azure. @@ -4341,6 +4414,7 @@ class CopySink(_serialization.Model): "FileSystemSink": "FileSystemSink", "InformixSink": "InformixSink", "JsonSink": "JsonSink", + "LakeHouseTableSink": "LakeHouseTableSink", "MicrosoftAccessSink": "MicrosoftAccessSink", "MongoDbAtlasSink": "MongoDbAtlasSink", "MongoDbV2Sink": "MongoDbV2Sink", @@ -4818,9 +4892,8 @@ class AzureBatchLinkedService(LinkedService): # pylint: disable=too-many-instan :ivar linked_service_name: The Azure Storage linked service reference. Required. :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str :ivar credential: The credential reference containing authentication information. :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ @@ -4845,7 +4918,7 @@ class AzureBatchLinkedService(LinkedService): # pylint: disable=too-many-instan "batch_uri": {"key": "typeProperties.batchUri", "type": "object"}, "pool_name": {"key": "typeProperties.poolName", "type": "object"}, "linked_service_name": {"key": "typeProperties.linkedServiceName", "type": "LinkedServiceReference"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, } @@ -4862,7 +4935,7 @@ def __init__( parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, access_key: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, credential: Optional["_models.CredentialReference"] = None, **kwargs: Any ) -> None: @@ -4892,9 +4965,8 @@ def __init__( :keyword linked_service_name: The Azure Storage linked service reference. Required. :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str :keyword credential: The credential reference containing authentication information. 
:paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ @@ -5235,9 +5307,8 @@ class AzureBlobFSLinkedService(LinkedService): # pylint: disable=too-many-insta factory regions’ cloud type. Type: string (or Expression with resultType string). :vartype azure_cloud_type: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str :ivar credential: The credential reference containing authentication information. :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference :ivar service_principal_credential_type: The service principal credential type to use in @@ -5274,7 +5345,7 @@ class AzureBlobFSLinkedService(LinkedService): # pylint: disable=too-many-insta "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, "tenant": {"key": "typeProperties.tenant", "type": "object"}, "azure_cloud_type": {"key": "typeProperties.azureCloudType", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, "service_principal_credential_type": {"key": "typeProperties.servicePrincipalCredentialType", "type": "object"}, "service_principal_credential": {"key": "typeProperties.servicePrincipalCredential", "type": "SecretBase"}, @@ -5296,7 +5367,7 @@ def __init__( service_principal_key: Optional["_models.SecretBase"] = None, tenant: Optional[JSON] = None, azure_cloud_type: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, credential: 
Optional["_models.CredentialReference"] = None, service_principal_credential_type: Optional[JSON] = None, service_principal_credential: Optional["_models.SecretBase"] = None, @@ -5336,9 +5407,8 @@ def __init__( the data factory regions’ cloud type. Type: string (or Expression with resultType string). :paramtype azure_cloud_type: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference :keyword service_principal_credential_type: The service principal credential type to use in @@ -5472,8 +5542,9 @@ class AzureBlobFSReadSettings(StoreReadSettings): # pylint: disable=too-many-in configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). :vartype file_list_path: JSON - :ivar enable_partition_discovery: Indicates whether to enable partition discovery. - :vartype enable_partition_discovery: bool + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). + :vartype enable_partition_discovery: JSON :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). 
:vartype partition_root_path: JSON @@ -5501,7 +5572,7 @@ class AzureBlobFSReadSettings(StoreReadSettings): # pylint: disable=too-many-in "wildcard_folder_path": {"key": "wildcardFolderPath", "type": "object"}, "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, "file_list_path": {"key": "fileListPath", "type": "object"}, - "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "object"}, "partition_root_path": {"key": "partitionRootPath", "type": "object"}, "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, "modified_datetime_start": {"key": "modifiedDatetimeStart", "type": "object"}, @@ -5518,7 +5589,7 @@ def __init__( wildcard_folder_path: Optional[JSON] = None, wildcard_file_name: Optional[JSON] = None, file_list_path: Optional[JSON] = None, - enable_partition_discovery: Optional[bool] = None, + enable_partition_discovery: Optional[JSON] = None, partition_root_path: Optional[JSON] = None, delete_files_after_completion: Optional[JSON] = None, modified_datetime_start: Optional[JSON] = None, @@ -5548,8 +5619,9 @@ def __init__( configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). :paramtype file_list_path: JSON - :keyword enable_partition_discovery: Indicates whether to enable partition discovery. - :paramtype enable_partition_discovery: bool + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). + :paramtype enable_partition_discovery: JSON :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :paramtype partition_root_path: JSON @@ -5609,7 +5681,8 @@ class AzureBlobFSSink(CopySink): :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. 
Type: boolean (or Expression with resultType boolean). :vartype disable_metrics_collection: JSON - :ivar copy_behavior: The type of copy behavior for copy sink. + :ivar copy_behavior: The type of copy behavior for copy sink. Type: string (or Expression with + resultType string). :vartype copy_behavior: JSON :ivar metadata: Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). @@ -5669,7 +5742,8 @@ def __init__( :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). :paramtype disable_metrics_collection: JSON - :keyword copy_behavior: The type of copy behavior for copy sink. + :keyword copy_behavior: The type of copy behavior for copy sink. Type: string (or Expression + with resultType string). :paramtype copy_behavior: JSON :keyword metadata: Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). @@ -5797,7 +5871,8 @@ class StoreWriteSettings(_serialization.Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings, AzureDataLakeStoreWriteSettings, - AzureFileStorageWriteSettings, FileServerWriteSettings, SftpWriteSettings + AzureFileStorageWriteSettings, FileServerWriteSettings, LakeHouseWriteSettings, + SftpWriteSettings All required parameters must be populated in order to send to Azure. 
@@ -5835,6 +5910,7 @@ class StoreWriteSettings(_serialization.Model): "AzureDataLakeStoreWriteSettings": "AzureDataLakeStoreWriteSettings", "AzureFileStorageWriteSettings": "AzureFileStorageWriteSettings", "FileServerWriteSettings": "FileServerWriteSettings", + "LakeHouseWriteSettings": "LakeHouseWriteSettings", "SftpWriteSettings": "SftpWriteSettings", } } @@ -5973,7 +6049,7 @@ class AzureBlobStorageLinkedService(LinkedService): # pylint: disable=too-many- :vartype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is mutually exclusive with connectionString, sasUri property. - :vartype service_endpoint: str + :vartype service_endpoint: JSON :ivar service_principal_id: The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). :vartype service_principal_id: JSON @@ -5990,10 +6066,9 @@ class AzureBlobStorageLinkedService(LinkedService): # pylint: disable=too-many- :ivar account_kind: Specify the kind of your storage account. Allowed values are: Storage (general purpose v1), StorageV2 (general purpose v2), BlobStorage, or BlockBlobStorage. Type: string (or Expression with resultType string). - :vartype account_kind: str + :vartype account_kind: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). + encrypted using the integration runtime credential manager. Type: string. :vartype encrypted_credential: str :ivar credential: The credential reference containing authentication information. 
:vartype credential: ~azure.mgmt.datafactory.models.CredentialReference @@ -6021,12 +6096,12 @@ class AzureBlobStorageLinkedService(LinkedService): # pylint: disable=too-many- "account_key": {"key": "typeProperties.accountKey", "type": "AzureKeyVaultSecretReference"}, "sas_uri": {"key": "typeProperties.sasUri", "type": "object"}, "sas_token": {"key": "typeProperties.sasToken", "type": "AzureKeyVaultSecretReference"}, - "service_endpoint": {"key": "typeProperties.serviceEndpoint", "type": "str"}, + "service_endpoint": {"key": "typeProperties.serviceEndpoint", "type": "object"}, "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, "tenant": {"key": "typeProperties.tenant", "type": "object"}, "azure_cloud_type": {"key": "typeProperties.azureCloudType", "type": "object"}, - "account_kind": {"key": "typeProperties.accountKind", "type": "str"}, + "account_kind": {"key": "typeProperties.accountKind", "type": "object"}, "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, @@ -6045,12 +6120,12 @@ def __init__( account_key: Optional["_models.AzureKeyVaultSecretReference"] = None, sas_uri: Optional[JSON] = None, sas_token: Optional["_models.AzureKeyVaultSecretReference"] = None, - service_endpoint: Optional[str] = None, + service_endpoint: Optional[JSON] = None, service_principal_id: Optional[JSON] = None, service_principal_key: Optional["_models.SecretBase"] = None, tenant: Optional[JSON] = None, azure_cloud_type: Optional[JSON] = None, - account_kind: Optional[str] = None, + account_kind: Optional[JSON] = None, encrypted_credential: Optional[str] = None, credential: Optional["_models.CredentialReference"] = None, authentication_type: 
Optional[Union[str, "_models.AzureStorageAuthenticationType"]] = None, @@ -6082,7 +6157,7 @@ def __init__( :paramtype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword service_endpoint: Blob service endpoint of the Azure Blob Storage resource. It is mutually exclusive with connectionString, sasUri property. - :paramtype service_endpoint: str + :paramtype service_endpoint: JSON :keyword service_principal_id: The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). :paramtype service_principal_id: JSON @@ -6099,10 +6174,9 @@ def __init__( :keyword account_kind: Specify the kind of your storage account. Allowed values are: Storage (general purpose v1), StorageV2 (general purpose v2), BlobStorage, or BlockBlobStorage. Type: string (or Expression with resultType string). - :paramtype account_kind: str + :paramtype account_kind: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). + are encrypted using the integration runtime credential manager. Type: string. :paramtype encrypted_credential: str :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference @@ -6234,8 +6308,9 @@ class AzureBlobStorageReadSettings(StoreReadSettings): # pylint: disable=too-ma configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). :vartype file_list_path: JSON - :ivar enable_partition_discovery: Indicates whether to enable partition discovery. - :vartype enable_partition_discovery: bool + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). 
+ :vartype enable_partition_discovery: JSON :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :vartype partition_root_path: JSON @@ -6264,7 +6339,7 @@ class AzureBlobStorageReadSettings(StoreReadSettings): # pylint: disable=too-ma "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, "prefix": {"key": "prefix", "type": "object"}, "file_list_path": {"key": "fileListPath", "type": "object"}, - "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "object"}, "partition_root_path": {"key": "partitionRootPath", "type": "object"}, "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, "modified_datetime_start": {"key": "modifiedDatetimeStart", "type": "object"}, @@ -6282,7 +6357,7 @@ def __init__( wildcard_file_name: Optional[JSON] = None, prefix: Optional[JSON] = None, file_list_path: Optional[JSON] = None, - enable_partition_discovery: Optional[bool] = None, + enable_partition_discovery: Optional[JSON] = None, partition_root_path: Optional[JSON] = None, delete_files_after_completion: Optional[JSON] = None, modified_datetime_start: Optional[JSON] = None, @@ -6315,8 +6390,9 @@ def __init__( configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). :paramtype file_list_path: JSON - :keyword enable_partition_discovery: Indicates whether to enable partition discovery. - :paramtype enable_partition_discovery: bool + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). + :paramtype enable_partition_discovery: JSON :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). 
:paramtype partition_root_path: JSON @@ -6754,9 +6830,8 @@ class AzureDatabricksDeltaLakeLinkedService(LinkedService): # pylint: disable=t this job. Type: string (or Expression with resultType string). :vartype cluster_id: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str :ivar credential: The credential reference containing authentication information. :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference :ivar workspace_resource_id: Workspace resource id for databricks REST API. Type: string (or @@ -6779,7 +6854,7 @@ class AzureDatabricksDeltaLakeLinkedService(LinkedService): # pylint: disable=t "domain": {"key": "typeProperties.domain", "type": "object"}, "access_token": {"key": "typeProperties.accessToken", "type": "SecretBase"}, "cluster_id": {"key": "typeProperties.clusterId", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, "workspace_resource_id": {"key": "typeProperties.workspaceResourceId", "type": "object"}, } @@ -6795,7 +6870,7 @@ def __init__( annotations: Optional[List[JSON]] = None, access_token: Optional["_models.SecretBase"] = None, cluster_id: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, credential: Optional["_models.CredentialReference"] = None, workspace_resource_id: Optional[JSON] = None, **kwargs: Any @@ -6823,9 +6898,8 @@ def __init__( of this job. Type: string (or Expression with resultType string). 
:paramtype cluster_id: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference :keyword workspace_resource_id: Workspace resource id for databricks REST API. Type: string (or @@ -7128,9 +7202,8 @@ class AzureDatabricksLinkedService(LinkedService): # pylint: disable=too-many-i disks are always enabled). Type: boolean (or Expression with resultType boolean). :vartype new_cluster_enable_elastic_disk: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str :ivar policy_id: The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string (or Expression with resultType string). 
:vartype policy_id: JSON @@ -7166,7 +7239,7 @@ class AzureDatabricksLinkedService(LinkedService): # pylint: disable=too-many-i "new_cluster_driver_node_type": {"key": "typeProperties.newClusterDriverNodeType", "type": "object"}, "new_cluster_init_scripts": {"key": "typeProperties.newClusterInitScripts", "type": "object"}, "new_cluster_enable_elastic_disk": {"key": "typeProperties.newClusterEnableElasticDisk", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, "policy_id": {"key": "typeProperties.policyId", "type": "object"}, "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, } @@ -7195,7 +7268,7 @@ def __init__( # pylint: disable=too-many-locals new_cluster_driver_node_type: Optional[JSON] = None, new_cluster_init_scripts: Optional[JSON] = None, new_cluster_enable_elastic_disk: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, policy_id: Optional[JSON] = None, credential: Optional["_models.CredentialReference"] = None, **kwargs: Any @@ -7271,9 +7344,8 @@ def __init__( # pylint: disable=too-many-locals disks are always enabled). Type: boolean (or Expression with resultType boolean). :paramtype new_cluster_enable_elastic_disk: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str :keyword policy_id: The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string (or Expression with resultType string). 
:paramtype policy_id: JSON @@ -7334,6 +7406,13 @@ class ExecutionActivity(Activity): :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. @@ -7354,6 +7433,8 @@ class ExecutionActivity(Activity): "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -7397,6 +7478,8 @@ def __init__( name: str, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -7411,6 +7494,13 @@ def 
__init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -7424,6 +7514,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, **kwargs @@ -7433,7 +7525,7 @@ def __init__( self.policy = policy -class AzureDataExplorerCommandActivity(ExecutionActivity): +class AzureDataExplorerCommandActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """Azure Data Explorer command activity. All required parameters must be populated in order to send to Azure. @@ -7447,6 +7539,13 @@ class AzureDataExplorerCommandActivity(ExecutionActivity): :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. 
+ This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. @@ -7474,6 +7573,8 @@ class AzureDataExplorerCommandActivity(ExecutionActivity): "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -7489,6 +7590,8 @@ def __init__( command: JSON, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -7504,6 +7607,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. 
+ This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -7523,6 +7633,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -8033,9 +8145,8 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): # pylint: disable=too resultType string). :vartype data_lake_analytics_uri: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -8058,7 +8169,7 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService): # pylint: disable=too "subscription_id": {"key": "typeProperties.subscriptionId", "type": "object"}, "resource_group_name": {"key": "typeProperties.resourceGroupName", "type": "object"}, "data_lake_analytics_uri": {"key": "typeProperties.dataLakeAnalyticsUri", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -8076,7 +8187,7 @@ def __init__( subscription_id: Optional[JSON] = None, resource_group_name: Optional[JSON] = None, data_lake_analytics_uri: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -8113,9 +8224,8 @@ def __init__( with resultType string). :paramtype data_lake_analytics_uri: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -8309,9 +8419,8 @@ class AzureDataLakeStoreLinkedService(LinkedService): # pylint: disable=too-man Factory account). Type: string (or Expression with resultType string). :vartype resource_group_name: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str :ivar credential: The credential reference containing authentication information. :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ @@ -8336,7 +8445,7 @@ class AzureDataLakeStoreLinkedService(LinkedService): # pylint: disable=too-man "account_name": {"key": "typeProperties.accountName", "type": "object"}, "subscription_id": {"key": "typeProperties.subscriptionId", "type": "object"}, "resource_group_name": {"key": "typeProperties.resourceGroupName", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, } @@ -8356,7 +8465,7 @@ def __init__( account_name: Optional[JSON] = None, subscription_id: Optional[JSON] = None, resource_group_name: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, credential: Optional["_models.CredentialReference"] = None, **kwargs: Any ) -> None: @@ -8398,9 +8507,8 @@ def __init__( Data Factory account). Type: string (or Expression with resultType string). :paramtype resource_group_name: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ @@ -8516,8 +8624,9 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): # pylint: disable=too- lexicographical order. 
Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). :vartype list_before: JSON - :ivar enable_partition_discovery: Indicates whether to enable partition discovery. - :vartype enable_partition_discovery: bool + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). + :vartype enable_partition_discovery: JSON :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :vartype partition_root_path: JSON @@ -8547,7 +8656,7 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): # pylint: disable=too- "file_list_path": {"key": "fileListPath", "type": "object"}, "list_after": {"key": "listAfter", "type": "object"}, "list_before": {"key": "listBefore", "type": "object"}, - "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "object"}, "partition_root_path": {"key": "partitionRootPath", "type": "object"}, "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, "modified_datetime_start": {"key": "modifiedDatetimeStart", "type": "object"}, @@ -8566,7 +8675,7 @@ def __init__( file_list_path: Optional[JSON] = None, list_after: Optional[JSON] = None, list_before: Optional[JSON] = None, - enable_partition_discovery: Optional[bool] = None, + enable_partition_discovery: Optional[JSON] = None, partition_root_path: Optional[JSON] = None, delete_files_after_completion: Optional[JSON] = None, modified_datetime_start: Optional[JSON] = None, @@ -8604,8 +8713,9 @@ def __init__( lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders under the folderPath. Type: string (or Expression with resultType string). 
:paramtype list_before: JSON - :keyword enable_partition_discovery: Indicates whether to enable partition discovery. - :paramtype enable_partition_discovery: bool + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). + :paramtype enable_partition_discovery: JSON :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :paramtype partition_root_path: JSON @@ -8667,7 +8777,8 @@ class AzureDataLakeStoreSink(CopySink): :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). :vartype disable_metrics_collection: JSON - :ivar copy_behavior: The type of copy behavior for copy sink. + :ivar copy_behavior: The type of copy behavior for copy sink. Type: string (or Expression with + resultType string). :vartype copy_behavior: JSON :ivar enable_adls_single_file_parallel: Single File Parallel. :vartype enable_adls_single_file_parallel: JSON @@ -8726,7 +8837,8 @@ def __init__( :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). :paramtype disable_metrics_collection: JSON - :keyword copy_behavior: The type of copy behavior for copy sink. + :keyword copy_behavior: The type of copy behavior for copy sink. Type: string (or Expression + with resultType string). :paramtype copy_behavior: JSON :keyword enable_adls_single_file_parallel: Single File Parallel. :paramtype enable_adls_single_file_parallel: JSON @@ -8849,8 +8961,8 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): :ivar copy_behavior: The type of copy behavior for copy sink. :vartype copy_behavior: JSON :ivar expiry_date_time: Specifies the expiry time of the written files. 
The time is applied to - the UTC time zone in the format of "2018-12-01T05:00:00Z". Default value is NULL. Type: integer - (or Expression with resultType integer). + the UTC time zone in the format of "2018-12-01T05:00:00Z". Default value is NULL. Type: string + (or Expression with resultType string). :vartype expiry_date_time: JSON """ @@ -8891,7 +9003,7 @@ def __init__( :paramtype copy_behavior: JSON :keyword expiry_date_time: Specifies the expiry time of the written files. The time is applied to the UTC time zone in the format of "2018-12-01T05:00:00Z". Default value is NULL. Type: - integer (or Expression with resultType integer). + string (or Expression with resultType string). :paramtype expiry_date_time: JSON """ super().__init__( @@ -8947,9 +9059,8 @@ class AzureFileStorageLinkedService(LinkedService): # pylint: disable=too-many- resultType string). :vartype snapshot: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -8972,7 +9083,7 @@ class AzureFileStorageLinkedService(LinkedService): # pylint: disable=too-many- "sas_token": {"key": "typeProperties.sasToken", "type": "AzureKeyVaultSecretReference"}, "file_share": {"key": "typeProperties.fileShare", "type": "object"}, "snapshot": {"key": "typeProperties.snapshot", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -8992,7 +9103,7 @@ def __init__( sas_token: Optional["_models.AzureKeyVaultSecretReference"] = None, file_share: Optional[JSON] = None, snapshot: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -9031,9 +9142,8 @@ def __init__( resultType string). :paramtype snapshot: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -9142,8 +9252,9 @@ class AzureFileStorageReadSettings(StoreReadSettings): # pylint: disable=too-ma configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). :vartype file_list_path: JSON - :ivar enable_partition_discovery: Indicates whether to enable partition discovery. - :vartype enable_partition_discovery: bool + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). 
+ :vartype enable_partition_discovery: JSON :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :vartype partition_root_path: JSON @@ -9172,7 +9283,7 @@ class AzureFileStorageReadSettings(StoreReadSettings): # pylint: disable=too-ma "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, "prefix": {"key": "prefix", "type": "object"}, "file_list_path": {"key": "fileListPath", "type": "object"}, - "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "object"}, "partition_root_path": {"key": "partitionRootPath", "type": "object"}, "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, "modified_datetime_start": {"key": "modifiedDatetimeStart", "type": "object"}, @@ -9190,7 +9301,7 @@ def __init__( wildcard_file_name: Optional[JSON] = None, prefix: Optional[JSON] = None, file_list_path: Optional[JSON] = None, - enable_partition_discovery: Optional[bool] = None, + enable_partition_discovery: Optional[JSON] = None, partition_root_path: Optional[JSON] = None, delete_files_after_completion: Optional[JSON] = None, modified_datetime_start: Optional[JSON] = None, @@ -9223,8 +9334,9 @@ def __init__( configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). :paramtype file_list_path: JSON - :keyword enable_partition_discovery: Indicates whether to enable partition discovery. - :paramtype enable_partition_discovery: bool + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). + :paramtype enable_partition_discovery: JSON :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). 
:paramtype partition_root_path: JSON @@ -9335,6 +9447,13 @@ class AzureFunctionActivity(ExecutionActivity): # pylint: disable=too-many-inst :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -9370,6 +9489,8 @@ class AzureFunctionActivity(ExecutionActivity): # pylint: disable=too-many-inst "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -9388,6 +9509,8 @@ def __init__( function_name: JSON, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -9404,6 +9527,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. 
:paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -9430,6 +9560,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -9467,9 +9599,8 @@ class AzureFunctionLinkedService(LinkedService): # pylint: disable=too-many-ins :ivar function_key: Function or Host key for Azure Function App. :vartype function_key: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str :ivar credential: The credential reference containing authentication information. :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference :ivar resource_id: Allowed token audiences for azure function. 
@@ -9493,7 +9624,7 @@ class AzureFunctionLinkedService(LinkedService): # pylint: disable=too-many-ins "annotations": {"key": "annotations", "type": "[object]"}, "function_app_url": {"key": "typeProperties.functionAppUrl", "type": "object"}, "function_key": {"key": "typeProperties.functionKey", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, "resource_id": {"key": "typeProperties.resourceId", "type": "object"}, "authentication": {"key": "typeProperties.authentication", "type": "object"}, @@ -9509,7 +9640,7 @@ def __init__( parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, function_key: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, credential: Optional["_models.CredentialReference"] = None, resource_id: Optional[JSON] = None, authentication: Optional[JSON] = None, @@ -9533,9 +9664,8 @@ def __init__( :keyword function_key: Function or Host key for Azure Function App. :paramtype function_key: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference :keyword resource_id: Allowed token audiences for azure function. 
@@ -9752,9 +9882,8 @@ class AzureMariaDBLinkedService(LinkedService): :ivar pwd: The Azure key vault secret reference of password in connection string. :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -9770,7 +9899,7 @@ class AzureMariaDBLinkedService(LinkedService): "annotations": {"key": "annotations", "type": "[object]"}, "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, "pwd": {"key": "typeProperties.pwd", "type": "AzureKeyVaultSecretReference"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -9783,7 +9912,7 @@ def __init__( annotations: Optional[List[JSON]] = None, connection_string: Optional[JSON] = None, pwd: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -9804,9 +9933,8 @@ def __init__( :keyword pwd: The Azure key vault secret reference of password in connection string. :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. 
+ :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -10040,6 +10168,13 @@ class AzureMLBatchExecutionActivity(ExecutionActivity): # pylint: disable=too-m :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -10074,6 +10209,8 @@ class AzureMLBatchExecutionActivity(ExecutionActivity): # pylint: disable=too-m "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -10089,6 +10226,8 @@ def __init__( name: str, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -10106,6 +10245,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. 
@@ -10134,6 +10280,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -10160,6 +10308,13 @@ class AzureMLExecutePipelineActivity(ExecutionActivity): # pylint: disable=too- :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -10210,6 +10365,8 @@ class AzureMLExecutePipelineActivity(ExecutionActivity): # pylint: disable=too- "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -10230,6 +10387,8 @@ def __init__( name: str, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -10252,6 +10411,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. 
@@ -10296,6 +10462,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -10350,9 +10518,8 @@ class AzureMLLinkedService(LinkedService): # pylint: disable=too-many-instance- (or Expression with resultType string). :vartype tenant: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str :ivar authentication: Type of authentication (Required to specify MSI) used to connect to AzureML. Type: string (or Expression with resultType string). :vartype authentication: JSON @@ -10377,7 +10544,7 @@ class AzureMLLinkedService(LinkedService): # pylint: disable=too-many-instance- "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, "tenant": {"key": "typeProperties.tenant", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, "authentication": {"key": "typeProperties.authentication", "type": "object"}, } @@ -10395,7 +10562,7 @@ def __init__( service_principal_id: Optional[JSON] = None, service_principal_key: Optional["_models.SecretBase"] = None, tenant: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, authentication: Optional[JSON] = None, **kwargs: Any ) -> None: @@ -10430,9 +10597,8 @@ def __init__( string (or Expression with resultType 
string). :paramtype tenant: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str :keyword authentication: Type of authentication (Required to specify MSI) used to connect to AzureML. Type: string (or Expression with resultType string). :paramtype authentication: JSON @@ -10483,6 +10649,9 @@ class AzureMLServiceLinkedService(LinkedService): # pylint: disable=too-many-in :ivar ml_workspace_name: Azure ML Service workspace name. Type: string (or Expression with resultType string). Required. :vartype ml_workspace_name: JSON + :ivar authentication: Type of authentication (Required to specify MSI) used to connect to + AzureML. Type: string (or Expression with resultType string). + :vartype authentication: JSON :ivar service_principal_id: The ID of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType string). @@ -10494,9 +10663,8 @@ class AzureMLServiceLinkedService(LinkedService): # pylint: disable=too-many-in (or Expression with resultType string). :vartype tenant: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -10516,10 +10684,11 @@ class AzureMLServiceLinkedService(LinkedService): # pylint: disable=too-many-in "subscription_id": {"key": "typeProperties.subscriptionId", "type": "object"}, "resource_group_name": {"key": "typeProperties.resourceGroupName", "type": "object"}, "ml_workspace_name": {"key": "typeProperties.mlWorkspaceName", "type": "object"}, + "authentication": {"key": "typeProperties.authentication", "type": "object"}, "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, "tenant": {"key": "typeProperties.tenant", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -10533,10 +10702,11 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, + authentication: Optional[JSON] = None, service_principal_id: Optional[JSON] = None, service_principal_key: Optional["_models.SecretBase"] = None, tenant: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -10560,6 +10730,9 @@ def __init__( :keyword ml_workspace_name: Azure ML Service workspace name. Type: string (or Expression with resultType string). Required. :paramtype ml_workspace_name: JSON + :keyword authentication: Type of authentication (Required to specify MSI) used to connect to + AzureML. Type: string (or Expression with resultType string). + :paramtype authentication: JSON :keyword service_principal_id: The ID of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. 
Type: string (or Expression with resultType string). @@ -10571,9 +10744,8 @@ def __init__( string (or Expression with resultType string). :paramtype tenant: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -10587,6 +10759,7 @@ def __init__( self.subscription_id = subscription_id self.resource_group_name = resource_group_name self.ml_workspace_name = ml_workspace_name + self.authentication = authentication self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.tenant = tenant @@ -10607,6 +10780,13 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): # pylint: disable=too-m :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -10641,6 +10821,8 @@ class AzureMLUpdateResourceActivity(ExecutionActivity): # pylint: disable=too-m "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -10662,6 +10844,8 @@ def __init__( trained_model_file_path: JSON, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -10676,6 +10860,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. 
:paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -10700,6 +10891,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -10775,9 +10968,8 @@ class AzureMySqlLinkedService(LinkedService): :ivar password: The Azure key vault secret reference of password in connection string. :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -10794,7 +10986,7 @@ class AzureMySqlLinkedService(LinkedService): "annotations": {"key": "annotations", "type": "[object]"}, "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, "password": {"key": "typeProperties.password", "type": "AzureKeyVaultSecretReference"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -10807,7 +10999,7 @@ def __init__( parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, password: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -10828,9 +11020,8 @@ def __init__( :keyword password: The Azure key vault secret reference of password in connection string. 
:paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -11185,9 +11376,8 @@ class AzurePostgreSqlLinkedService(LinkedService): :ivar password: The Azure key vault secret reference of password in connection string. :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -11203,7 +11393,7 @@ class AzurePostgreSqlLinkedService(LinkedService): "annotations": {"key": "annotations", "type": "[object]"}, "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, "password": {"key": "typeProperties.password", "type": "AzureKeyVaultSecretReference"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -11216,7 +11406,7 @@ def __init__( annotations: Optional[List[JSON]] = None, connection_string: Optional[JSON] = None, password: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -11237,9 +11427,8 @@ def __init__( :keyword password: The Azure key vault secret reference of password in connection string. :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -11907,9 +12096,8 @@ class AzureSearchLinkedService(LinkedService): :ivar key: Admin Key for Azure Search service. :vartype key: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -11926,7 +12114,7 @@ class AzureSearchLinkedService(LinkedService): "annotations": {"key": "annotations", "type": "[object]"}, "url": {"key": "typeProperties.url", "type": "object"}, "key": {"key": "typeProperties.key", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -11939,7 +12127,7 @@ def __init__( parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, key: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -11960,9 +12148,8 @@ def __init__( :keyword key: Admin Key for Azure Search service. :paramtype key: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -12015,9 +12202,8 @@ class AzureSqlDatabaseLinkedService(LinkedService): # pylint: disable=too-many- factory regions’ cloud type. Type: string (or Expression with resultType string). :vartype azure_cloud_type: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str :ivar always_encrypted_settings: Sql always encrypted properties. :vartype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties :ivar credential: The credential reference containing authentication information. @@ -12042,7 +12228,7 @@ class AzureSqlDatabaseLinkedService(LinkedService): # pylint: disable=too-many- "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, "tenant": {"key": "typeProperties.tenant", "type": "object"}, "azure_cloud_type": {"key": "typeProperties.azureCloudType", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, "always_encrypted_settings": { "key": "typeProperties.alwaysEncryptedSettings", "type": "SqlAlwaysEncryptedProperties", @@ -12064,7 +12250,7 @@ def __init__( service_principal_key: Optional["_models.SecretBase"] = None, tenant: Optional[JSON] = None, azure_cloud_type: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, always_encrypted_settings: Optional["_models.SqlAlwaysEncryptedProperties"] = None, credential: Optional["_models.CredentialReference"] = None, **kwargs: Any @@ -12100,9 +12286,8 @@ def __init__( the data factory regions’ cloud type. Type: string (or Expression with resultType string). :paramtype azure_cloud_type: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. 
+ :paramtype encrypted_credential: str :keyword always_encrypted_settings: Sql always encrypted properties. :paramtype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties @@ -12167,9 +12352,8 @@ class AzureSqlDWLinkedService(LinkedService): # pylint: disable=too-many-instan factory regions’ cloud type. Type: string (or Expression with resultType string). :vartype azure_cloud_type: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str :ivar credential: The credential reference containing authentication information. :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ @@ -12192,7 +12376,7 @@ class AzureSqlDWLinkedService(LinkedService): # pylint: disable=too-many-instan "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, "tenant": {"key": "typeProperties.tenant", "type": "object"}, "azure_cloud_type": {"key": "typeProperties.azureCloudType", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, } @@ -12210,7 +12394,7 @@ def __init__( service_principal_key: Optional["_models.SecretBase"] = None, tenant: Optional[JSON] = None, azure_cloud_type: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, credential: Optional["_models.CredentialReference"] = None, **kwargs: Any ) -> None: @@ -12246,9 +12430,8 @@ def __init__( the data factory regions’ cloud type. 
Type: string (or Expression with resultType string). :paramtype azure_cloud_type: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ @@ -12430,9 +12613,8 @@ class AzureSqlMILinkedService(LinkedService): # pylint: disable=too-many-instan factory regions’ cloud type. Type: string (or Expression with resultType string). :vartype azure_cloud_type: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str :ivar always_encrypted_settings: Sql always encrypted properties. :vartype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties :ivar credential: The credential reference containing authentication information. 
@@ -12457,7 +12639,7 @@ class AzureSqlMILinkedService(LinkedService): # pylint: disable=too-many-instan "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, "tenant": {"key": "typeProperties.tenant", "type": "object"}, "azure_cloud_type": {"key": "typeProperties.azureCloudType", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, "always_encrypted_settings": { "key": "typeProperties.alwaysEncryptedSettings", "type": "SqlAlwaysEncryptedProperties", @@ -12479,7 +12661,7 @@ def __init__( service_principal_key: Optional["_models.SecretBase"] = None, tenant: Optional[JSON] = None, azure_cloud_type: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, always_encrypted_settings: Optional["_models.SqlAlwaysEncryptedProperties"] = None, credential: Optional["_models.CredentialReference"] = None, **kwargs: Any @@ -12515,9 +12697,8 @@ def __init__( the data factory regions’ cloud type. Type: string (or Expression with resultType string). :paramtype azure_cloud_type: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str :keyword always_encrypted_settings: Sql always encrypted properties. :paramtype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties @@ -12876,6 +13057,10 @@ class AzureSqlSource(TabularSource): # pylint: disable=too-many-instance-attrib :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. 
Example: "{Parameter1: {value: "1", type: "int"}}". :vartype stored_procedure_parameters: JSON + :ivar isolation_level: Specifies the transaction locking behavior for the SQL source. Allowed + values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value + is ReadCommitted. Type: string (or Expression with resultType string). + :vartype isolation_level: JSON :ivar produce_additional_types: Which additional types to produce. :vartype produce_additional_types: JSON :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. @@ -12901,6 +13086,7 @@ class AzureSqlSource(TabularSource): # pylint: disable=too-many-instance-attrib "sql_reader_query": {"key": "sqlReaderQuery", "type": "object"}, "sql_reader_stored_procedure_name": {"key": "sqlReaderStoredProcedureName", "type": "object"}, "stored_procedure_parameters": {"key": "storedProcedureParameters", "type": "object"}, + "isolation_level": {"key": "isolationLevel", "type": "object"}, "produce_additional_types": {"key": "produceAdditionalTypes", "type": "object"}, "partition_option": {"key": "partitionOption", "type": "object"}, "partition_settings": {"key": "partitionSettings", "type": "SqlPartitionSettings"}, @@ -12919,6 +13105,7 @@ def __init__( sql_reader_query: Optional[JSON] = None, sql_reader_stored_procedure_name: Optional[JSON] = None, stored_procedure_parameters: Optional[JSON] = None, + isolation_level: Optional[JSON] = None, produce_additional_types: Optional[JSON] = None, partition_option: Optional[JSON] = None, partition_settings: Optional["_models.SqlPartitionSettings"] = None, @@ -12956,6 +13143,10 @@ def __init__( :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :paramtype stored_procedure_parameters: JSON + :keyword isolation_level: Specifies the transaction locking behavior for the SQL source. 
+ Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default + value is ReadCommitted. Type: string (or Expression with resultType string). + :paramtype isolation_level: JSON :keyword produce_additional_types: Which additional types to produce. :paramtype produce_additional_types: JSON :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. @@ -12978,6 +13169,7 @@ def __init__( self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters + self.isolation_level = isolation_level self.produce_additional_types = produce_additional_types self.partition_option = partition_option self.partition_settings = partition_settings @@ -13134,8 +13326,7 @@ class AzureStorageLinkedService(LinkedService): # pylint: disable=too-many-inst :ivar sas_token: The Azure key vault secret reference of sasToken in sas uri. :vartype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). + encrypted using the integration runtime credential manager. Type: string. :vartype encrypted_credential: str """ @@ -13195,8 +13386,7 @@ def __init__( :keyword sas_token: The Azure key vault secret reference of sasToken in sas uri. :paramtype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). + are encrypted using the integration runtime credential manager. Type: string. 
:paramtype encrypted_credential: str """ super().__init__( @@ -13686,8 +13876,7 @@ class AzureTableStorageLinkedService(LinkedService): # pylint: disable=too-many :ivar sas_token: The Azure key vault secret reference of sasToken in sas uri. :vartype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). + encrypted using the integration runtime credential manager. Type: string. :vartype encrypted_credential: str """ @@ -13747,8 +13936,7 @@ def __init__( :keyword sas_token: The Azure key vault secret reference of sasToken in sas uri. :paramtype sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). + are encrypted using the integration runtime credential manager. Type: string. :paramtype encrypted_credential: str """ super().__init__( @@ -13917,7 +14105,8 @@ class FormatReadSettings(_serialization.Model): """Format read settings. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - BinaryReadSettings, DelimitedTextReadSettings, JsonReadSettings, XmlReadSettings + BinaryReadSettings, DelimitedTextReadSettings, JsonReadSettings, ParquetReadSettings, + XmlReadSettings All required parameters must be populated in order to send to Azure. 
@@ -13942,6 +14131,7 @@ class FormatReadSettings(_serialization.Model): "BinaryReadSettings": "BinaryReadSettings", "DelimitedTextReadSettings": "DelimitedTextReadSettings", "JsonReadSettings": "JsonReadSettings", + "ParquetReadSettings": "ParquetReadSettings", "XmlReadSettings": "XmlReadSettings", } } @@ -14814,9 +15004,8 @@ class CassandraLinkedService(LinkedService): # pylint: disable=too-many-instanc :ivar password: Password for authentication. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -14836,7 +15025,7 @@ class CassandraLinkedService(LinkedService): # pylint: disable=too-many-instanc "port": {"key": "typeProperties.port", "type": "object"}, "username": {"key": "typeProperties.username", "type": "object"}, "password": {"key": "typeProperties.password", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -14852,7 +15041,7 @@ def __init__( port: Optional[JSON] = None, username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -14882,9 +15071,8 @@ def __init__( :keyword password: Password for authentication. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. 
Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -15225,6 +15413,212 @@ def __init__( self.run_dimension = run_dimension +class ChangeDataCaptureFolder(_serialization.Model): + """The folder that this CDC is in. If not specified, CDC will appear at the root level. + + :ivar name: The name of the folder that this CDC is in. + :vartype name: str + """ + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + } + + def __init__(self, *, name: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword name: The name of the folder that this CDC is in. + :paramtype name: str + """ + super().__init__(**kwargs) + self.name = name + + +class ChangeDataCaptureListResponse(_serialization.Model): + """A list of change data capture resources. + + All required parameters must be populated in order to send to Azure. + + :ivar value: Lists all resources of type change data capture. Required. + :vartype value: list[~azure.mgmt.datafactory.models.ChangeDataCaptureResource] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str + """ + + _validation = { + "value": {"required": True}, + } + + _attribute_map = { + "value": {"key": "value", "type": "[ChangeDataCaptureResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, *, value: List["_models.ChangeDataCaptureResource"], next_link: Optional[str] = None, **kwargs: Any + ) -> None: + """ + :keyword value: Lists all resources of type change data capture. Required. + :paramtype value: list[~azure.mgmt.datafactory.models.ChangeDataCaptureResource] + :keyword next_link: The link to the next page of results, if any remaining results exist. 
+ :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class SubResource(_serialization.Model): + """Azure Data Factory nested resource, which belongs to a factory. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "etag": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "etag": {"key": "etag", "type": "str"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.etag = None + + +class ChangeDataCaptureResource(SubResource): # pylint: disable=too-many-instance-attributes + """Change data capture resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar folder: The folder that this CDC is in. If not specified, CDC will appear at the root + level. 
+ :vartype folder: ~azure.mgmt.datafactory.models.ChangeDataCaptureFolder + :ivar description: The description of the change data capture. + :vartype description: str + :ivar source_connections_info: List of sources connections that can be used as sources in the + CDC. Required. + :vartype source_connections_info: + list[~azure.mgmt.datafactory.models.MapperSourceConnectionsInfo] + :ivar target_connections_info: List of target connections that can be used as sources in the + CDC. Required. + :vartype target_connections_info: + list[~azure.mgmt.datafactory.models.MapperTargetConnectionsInfo] + :ivar policy: CDC policy. Required. + :vartype policy: ~azure.mgmt.datafactory.models.MapperPolicy + :ivar allow_v_net_override: A boolean to determine if the vnet configuration needs to be + overwritten. + :vartype allow_v_net_override: bool + :ivar status: Status of the CDC as to if it is running or stopped. + :vartype status: str + """ + + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "etag": {"readonly": True}, + "source_connections_info": {"required": True}, + "target_connections_info": {"required": True}, + "policy": {"required": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "etag": {"key": "etag", "type": "str"}, + "additional_properties": {"key": "", "type": "{object}"}, + "folder": {"key": "properties.folder", "type": "ChangeDataCaptureFolder"}, + "description": {"key": "properties.description", "type": "str"}, + "source_connections_info": {"key": "properties.sourceConnectionsInfo", "type": "[MapperSourceConnectionsInfo]"}, + "target_connections_info": {"key": "properties.targetConnectionsInfo", "type": "[MapperTargetConnectionsInfo]"}, + "policy": {"key": "properties.policy", "type": "MapperPolicy"}, + "allow_v_net_override": {"key": "properties.allowVNetOverride", "type": "bool"}, + "status": 
{"key": "properties.status", "type": "str"}, + } + + def __init__( + self, + *, + source_connections_info: List["_models.MapperSourceConnectionsInfo"], + target_connections_info: List["_models.MapperTargetConnectionsInfo"], + policy: "_models.MapperPolicy", + additional_properties: Optional[Dict[str, JSON]] = None, + folder: Optional["_models.ChangeDataCaptureFolder"] = None, + description: Optional[str] = None, + allow_v_net_override: Optional[bool] = None, + status: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword folder: The folder that this CDC is in. If not specified, CDC will appear at the root + level. + :paramtype folder: ~azure.mgmt.datafactory.models.ChangeDataCaptureFolder + :keyword description: The description of the change data capture. + :paramtype description: str + :keyword source_connections_info: List of sources connections that can be used as sources in + the CDC. Required. + :paramtype source_connections_info: + list[~azure.mgmt.datafactory.models.MapperSourceConnectionsInfo] + :keyword target_connections_info: List of target connections that can be used as sources in the + CDC. Required. + :paramtype target_connections_info: + list[~azure.mgmt.datafactory.models.MapperTargetConnectionsInfo] + :keyword policy: CDC policy. Required. + :paramtype policy: ~azure.mgmt.datafactory.models.MapperPolicy + :keyword allow_v_net_override: A boolean to determine if the vnet configuration needs to be + overwritten. + :paramtype allow_v_net_override: bool + :keyword status: Status of the CDC as to if it is running or stopped. 
+ :paramtype status: str + """ + super().__init__(**kwargs) + self.additional_properties = additional_properties + self.folder = folder + self.description = description + self.source_connections_info = source_connections_info + self.target_connections_info = target_connections_info + self.policy = policy + self.allow_v_net_override = allow_v_net_override + self.status = status + + class CloudError(_serialization.Model): """The object that defines the structure of an Azure Data Factory error response. @@ -15513,9 +15907,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService): # pylint: disable=t be AzureKeyVaultSecretReference. :vartype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -15542,7 +15935,7 @@ class CommonDataServiceForAppsLinkedService(LinkedService): # pylint: disable=t "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, "service_principal_credential_type": {"key": "typeProperties.servicePrincipalCredentialType", "type": "object"}, "service_principal_credential": {"key": "typeProperties.servicePrincipalCredential", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -15564,7 +15957,7 @@ def __init__( service_principal_id: Optional[JSON] = None, service_principal_credential_type: Optional[JSON] = None, service_principal_credential: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -15625,9 +16018,8 @@ def __init__( be AzureKeyVaultSecretReference. :paramtype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -15987,9 +16379,8 @@ class ConcurLinkedService(LinkedService): # pylint: disable=too-many-instance-a connecting over SSL. The default value is true. :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -16012,7 +16403,7 @@ class ConcurLinkedService(LinkedService): # pylint: disable=too-many-instance-a "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -16030,7 +16421,7 @@ def __init__( use_encrypted_endpoints: Optional[JSON] = None, use_host_verification: Optional[JSON] = None, use_peer_verification: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -16066,9 +16457,8 @@ def __init__( connecting over SSL. The default value is true. :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -16340,6 +16730,13 @@ class CopyActivity(ExecutionActivity): # pylint: disable=too-many-instance-attr :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". 
+ :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. @@ -16405,6 +16802,8 @@ class CopyActivity(ExecutionActivity): # pylint: disable=too-many-instance-attr "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -16439,6 +16838,8 @@ def __init__( # pylint: disable=too-many-locals sink: "_models.CopySink", additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -16468,6 +16869,13 @@ def __init__( # pylint: disable=too-many-locals :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. 
This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -16524,6 +16932,8 @@ def __init__( # pylint: disable=too-many-locals additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -16703,10 +17113,8 @@ class CosmosDbLinkedService(LinkedService): # pylint: disable=too-many-instance :vartype service_principal_id: JSON :ivar service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Known values are: - "ServicePrincipalKey" and "ServicePrincipalCert". - :vartype service_principal_credential_type: str or - ~azure.mgmt.datafactory.models.CosmosDbServicePrincipalCredentialType + for certificate. Type: string. + :vartype service_principal_credential_type: JSON :ivar service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. 
If @@ -16720,13 +17128,12 @@ class CosmosDbLinkedService(LinkedService): # pylint: disable=too-many-instance values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). :vartype azure_cloud_type: JSON - :ivar connection_mode: The connection mode used to access CosmosDB account. Type: string (or - Expression with resultType string). Known values are: "Gateway" and "Direct". + :ivar connection_mode: The connection mode used to access CosmosDB account. Type: string. Known + values are: "Gateway" and "Direct". :vartype connection_mode: str or ~azure.mgmt.datafactory.models.CosmosDbConnectionMode :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str :ivar credential: The credential reference containing authentication information. 
:vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ @@ -16747,12 +17154,12 @@ class CosmosDbLinkedService(LinkedService): # pylint: disable=too-many-instance "database": {"key": "typeProperties.database", "type": "object"}, "account_key": {"key": "typeProperties.accountKey", "type": "SecretBase"}, "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, - "service_principal_credential_type": {"key": "typeProperties.servicePrincipalCredentialType", "type": "str"}, + "service_principal_credential_type": {"key": "typeProperties.servicePrincipalCredentialType", "type": "object"}, "service_principal_credential": {"key": "typeProperties.servicePrincipalCredential", "type": "SecretBase"}, "tenant": {"key": "typeProperties.tenant", "type": "object"}, "azure_cloud_type": {"key": "typeProperties.azureCloudType", "type": "object"}, "connection_mode": {"key": "typeProperties.connectionMode", "type": "str"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, } @@ -16769,14 +17176,12 @@ def __init__( database: Optional[JSON] = None, account_key: Optional["_models.SecretBase"] = None, service_principal_id: Optional[JSON] = None, - service_principal_credential_type: Optional[ - Union[str, "_models.CosmosDbServicePrincipalCredentialType"] - ] = None, + service_principal_credential_type: Optional[JSON] = None, service_principal_credential: Optional["_models.SecretBase"] = None, tenant: Optional[JSON] = None, azure_cloud_type: Optional[JSON] = None, connection_mode: Optional[Union[str, "_models.CosmosDbConnectionMode"]] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, credential: Optional["_models.CredentialReference"] = None, **kwargs: Any ) -> None: @@ -16809,10 
+17214,8 @@ def __init__( :paramtype service_principal_id: JSON :keyword service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Known values are: - "ServicePrincipalKey" and "ServicePrincipalCert". - :paramtype service_principal_credential_type: str or - ~azure.mgmt.datafactory.models.CosmosDbServicePrincipalCredentialType + for certificate. Type: string. + :paramtype service_principal_credential_type: JSON :keyword service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -16826,13 +17229,12 @@ def __init__( Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). :paramtype azure_cloud_type: JSON - :keyword connection_mode: The connection mode used to access CosmosDB account. Type: string (or - Expression with resultType string). Known values are: "Gateway" and "Direct". + :keyword connection_mode: The connection mode used to access CosmosDB account. Type: string. + Known values are: "Gateway" and "Direct". :paramtype connection_mode: str or ~azure.mgmt.datafactory.models.CosmosDbConnectionMode :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str :keyword credential: The credential reference containing authentication information. 
:paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ @@ -17639,9 +18041,8 @@ class CouchbaseLinkedService(LinkedService): :ivar cred_string: The Azure key vault secret reference of credString in connection string. :vartype cred_string: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -17657,7 +18058,7 @@ class CouchbaseLinkedService(LinkedService): "annotations": {"key": "annotations", "type": "[object]"}, "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, "cred_string": {"key": "typeProperties.credString", "type": "AzureKeyVaultSecretReference"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -17670,7 +18071,7 @@ def __init__( annotations: Optional[List[JSON]] = None, connection_string: Optional[JSON] = None, cred_string: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -17691,9 +18092,8 @@ def __init__( :keyword cred_string: The Azure key vault secret reference of credString in connection string. :paramtype cred_string: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). 
- :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -18213,44 +18613,6 @@ def __init__( self.reference_name = reference_name -class SubResource(_serialization.Model): - """Azure Data Factory nested resource, which belongs to a factory. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - """ - - _validation = { - "id": {"readonly": True}, - "name": {"readonly": True}, - "type": {"readonly": True}, - "etag": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "type": {"key": "type", "type": "str"}, - "etag": {"key": "etag", "type": "str"}, - } - - def __init__(self, **kwargs: Any) -> None: - """ """ - super().__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.etag = None - - class CredentialResource(SubResource): """Credential resource type. @@ -18309,6 +18671,13 @@ class CustomActivity(ExecutionActivity): # pylint: disable=too-many-instance-at :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. 
Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. @@ -18350,6 +18719,8 @@ class CustomActivity(ExecutionActivity): # pylint: disable=too-many-instance-at "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -18370,6 +18741,8 @@ def __init__( command: JSON, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -18390,6 +18763,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. 
Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -18423,6 +18803,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -18763,6 +19145,13 @@ class DatabricksNotebookActivity(ExecutionActivity): # pylint: disable=too-many :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -18793,6 +19182,8 @@ class DatabricksNotebookActivity(ExecutionActivity): # pylint: disable=too-many "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -18809,6 +19200,8 @@ def __init__( notebook_path: JSON, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -18825,6 +19218,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. 
:paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -18848,6 +19248,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -18874,6 +19276,13 @@ class DatabricksSparkJarActivity(ExecutionActivity): # pylint: disable=too-many :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -18903,6 +19312,8 @@ class DatabricksSparkJarActivity(ExecutionActivity): # pylint: disable=too-many "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -18919,6 +19330,8 @@ def __init__( main_class_name: JSON, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -18935,6 +19348,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. 
:paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -18957,6 +19377,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -18983,6 +19405,13 @@ class DatabricksSparkPythonActivity(ExecutionActivity): # pylint: disable=too-m :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -19011,6 +19440,8 @@ class DatabricksSparkPythonActivity(ExecutionActivity): # pylint: disable=too-m "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -19027,6 +19458,8 @@ def __init__( python_file: JSON, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -19043,6 +19476,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. 
:paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -19064,6 +19504,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -19962,6 +20404,13 @@ class DataLakeAnalyticsUSQLActivity(ExecutionActivity): # pylint: disable=too-m :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -20004,6 +20453,8 @@ class DataLakeAnalyticsUSQLActivity(ExecutionActivity): # pylint: disable=too-m "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -20025,6 +20476,8 @@ def __init__( script_linked_service: "_models.LinkedServiceReference", additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -20044,6 +20497,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. 
:paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -20077,6 +20537,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -20093,6 +20555,62 @@ def __init__( self.compilation_mode = compilation_mode +class DataMapperMapping(_serialization.Model): + """Source and target table mapping details. + + :ivar target_entity_name: Name of the target table. + :vartype target_entity_name: str + :ivar source_entity_name: Name of the source table. + :vartype source_entity_name: str + :ivar source_connection_reference: The connection reference for the source connection. + :vartype source_connection_reference: ~azure.mgmt.datafactory.models.MapperConnectionReference + :ivar attribute_mapping_info: This holds the user provided attribute mapping information. + :vartype attribute_mapping_info: ~azure.mgmt.datafactory.models.MapperAttributeMappings + :ivar source_denormalize_info: This holds the source denormalization information used while + joining multiple sources. 
+ :vartype source_denormalize_info: JSON + """ + + _attribute_map = { + "target_entity_name": {"key": "targetEntityName", "type": "str"}, + "source_entity_name": {"key": "sourceEntityName", "type": "str"}, + "source_connection_reference": {"key": "sourceConnectionReference", "type": "MapperConnectionReference"}, + "attribute_mapping_info": {"key": "attributeMappingInfo", "type": "MapperAttributeMappings"}, + "source_denormalize_info": {"key": "sourceDenormalizeInfo", "type": "object"}, + } + + def __init__( + self, + *, + target_entity_name: Optional[str] = None, + source_entity_name: Optional[str] = None, + source_connection_reference: Optional["_models.MapperConnectionReference"] = None, + attribute_mapping_info: Optional["_models.MapperAttributeMappings"] = None, + source_denormalize_info: Optional[JSON] = None, + **kwargs: Any + ) -> None: + """ + :keyword target_entity_name: Name of the target table. + :paramtype target_entity_name: str + :keyword source_entity_name: Name of the source table. + :paramtype source_entity_name: str + :keyword source_connection_reference: The connection reference for the source connection. + :paramtype source_connection_reference: + ~azure.mgmt.datafactory.models.MapperConnectionReference + :keyword attribute_mapping_info: This holds the user provided attribute mapping information. + :paramtype attribute_mapping_info: ~azure.mgmt.datafactory.models.MapperAttributeMappings + :keyword source_denormalize_info: This holds the source denormalization information used while + joining multiple sources. 
+ :paramtype source_denormalize_info: JSON + """ + super().__init__(**kwargs) + self.target_entity_name = target_entity_name + self.source_entity_name = source_entity_name + self.source_connection_reference = source_connection_reference + self.attribute_mapping_info = attribute_mapping_info + self.source_denormalize_info = source_denormalize_info + + class DatasetCompression(_serialization.Model): """The compression method used on a dataset. @@ -20407,9 +20925,8 @@ class DataworldLinkedService(LinkedService): :ivar api_token: The api token for the Dataworld source. Required. :vartype api_token: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -20425,7 +20942,7 @@ class DataworldLinkedService(LinkedService): "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, "annotations": {"key": "annotations", "type": "[object]"}, "api_token": {"key": "typeProperties.apiToken", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -20437,7 +20954,7 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -20455,9 +20972,8 @@ def __init__( :keyword api_token: The api token for the Dataworld source. Required. 
:paramtype api_token: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -20517,8 +21033,8 @@ class Db2LinkedService(LinkedService): # pylint: disable=too-many-instance-attr :vartype certificate_common_name: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. It is mutually exclusive with - connectionString property. Type: string (or Expression with resultType string). - :vartype encrypted_credential: JSON + connectionString property. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -20540,7 +21056,7 @@ class Db2LinkedService(LinkedService): # pylint: disable=too-many-instance-attr "password": {"key": "typeProperties.password", "type": "SecretBase"}, "package_collection": {"key": "typeProperties.packageCollection", "type": "object"}, "certificate_common_name": {"key": "typeProperties.certificateCommonName", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -20559,7 +21075,7 @@ def __init__( password: Optional["_models.SecretBase"] = None, package_collection: Optional[JSON] = None, certificate_common_name: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -20601,8 +21117,8 @@ def __init__( :paramtype certificate_common_name: JSON :keyword 
encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. It is mutually exclusive with - connectionString property. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: JSON + connectionString property. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -20858,6 +21374,13 @@ class DeleteActivity(ExecutionActivity): # pylint: disable=too-many-instance-at :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -20896,6 +21419,8 @@ class DeleteActivity(ExecutionActivity): # pylint: disable=too-many-instance-at "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -20915,6 +21440,8 @@ def __init__( dataset: "_models.DatasetReference", additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -20934,6 +21461,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. 
:paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -20963,6 +21497,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -21949,9 +22485,8 @@ class DrillLinkedService(LinkedService): :ivar pwd: The Azure key vault secret reference of password in connection string. :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -21967,7 +22502,7 @@ class DrillLinkedService(LinkedService): "annotations": {"key": "annotations", "type": "[object]"}, "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, "pwd": {"key": "typeProperties.pwd", "type": "AzureKeyVaultSecretReference"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -21980,7 +22515,7 @@ def __init__( annotations: Optional[List[JSON]] = None, connection_string: Optional[JSON] = None, pwd: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -22001,9 +22536,8 @@ def __init__( :keyword pwd: The Azure key vault secret reference of password in connection string. 
:paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -22350,9 +22884,8 @@ class DynamicsAXLinkedService(LinkedService): # pylint: disable=too-many-instan Expression with resultType string). Required. :vartype aad_resource_id: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -22376,7 +22909,7 @@ class DynamicsAXLinkedService(LinkedService): # pylint: disable=too-many-instan "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, "tenant": {"key": "typeProperties.tenant", "type": "object"}, "aad_resource_id": {"key": "typeProperties.aadResourceId", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -22392,7 +22925,7 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -22425,9 +22958,8 @@ def __init__( (or Expression with resultType string). Required. :paramtype aad_resource_id: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -22830,9 +23362,8 @@ class DynamicsCrmLinkedService(LinkedService): # pylint: disable=too-many-insta be AzureKeyVaultSecretReference. :vartype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. 
Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -22859,7 +23390,7 @@ class DynamicsCrmLinkedService(LinkedService): # pylint: disable=too-many-insta "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, "service_principal_credential_type": {"key": "typeProperties.servicePrincipalCredentialType", "type": "object"}, "service_principal_credential": {"key": "typeProperties.servicePrincipalCredential", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -22881,7 +23412,7 @@ def __init__( service_principal_id: Optional[JSON] = None, service_principal_credential_type: Optional[JSON] = None, service_principal_credential: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -22939,9 +23470,8 @@ def __init__( be AzureKeyVaultSecretReference. :paramtype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -23344,9 +23874,8 @@ class DynamicsLinkedService(LinkedService): # pylint: disable=too-many-instance be AzureKeyVaultSecretReference. :vartype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. 
Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str :ivar credential: The credential reference containing authentication information. :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference """ @@ -23375,7 +23904,7 @@ class DynamicsLinkedService(LinkedService): # pylint: disable=too-many-instance "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, "service_principal_credential_type": {"key": "typeProperties.servicePrincipalCredentialType", "type": "object"}, "service_principal_credential": {"key": "typeProperties.servicePrincipalCredential", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, } @@ -23398,7 +23927,7 @@ def __init__( service_principal_id: Optional[JSON] = None, service_principal_credential_type: Optional[JSON] = None, service_principal_credential: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, credential: Optional["_models.CredentialReference"] = None, **kwargs: Any ) -> None: @@ -23456,9 +23985,8 @@ def __init__( be AzureKeyVaultSecretReference. :paramtype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. 
+ :paramtype encrypted_credential: str :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference """ @@ -23736,9 +24264,8 @@ class EloquaLinkedService(LinkedService): # pylint: disable=too-many-instance-a connecting over SSL. The default value is true. :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -23760,7 +24287,7 @@ class EloquaLinkedService(LinkedService): # pylint: disable=too-many-instance-a "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -23777,7 +24304,7 @@ def __init__( use_encrypted_endpoints: Optional[JSON] = None, use_host_verification: Optional[JSON] = None, use_peer_verification: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -23810,9 +24337,8 @@ def __init__( connecting over SSL. The default value is true. :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). 
- :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -24427,6 +24953,13 @@ class ExecuteDataFlowActivity(ExecutionActivity): # pylint: disable=too-many-in :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -24469,6 +25002,8 @@ class ExecuteDataFlowActivity(ExecutionActivity): # pylint: disable=too-many-in "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -24490,6 +25025,8 @@ def __init__( data_flow: "_models.DataFlowReference", additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -24511,6 +25048,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. 
:paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -24545,6 +25089,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -24687,7 +25233,7 @@ def __init__( self.core_count = core_count -class ExecutePipelineActivity(ControlActivity): +class ExecutePipelineActivity(ControlActivity): # pylint: disable=too-many-instance-attributes """Execute pipeline activity. All required parameters must be populated in order to send to Azure. @@ -24701,6 +25247,13 @@ class ExecutePipelineActivity(ControlActivity): :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -24727,6 +25280,8 @@ class ExecutePipelineActivity(ControlActivity): "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "policy": {"key": "policy", "type": "ExecutePipelineActivityPolicy"}, @@ -24742,6 +25297,8 @@ def __init__( pipeline: "_models.PipelineReference", additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, policy: Optional["_models.ExecutePipelineActivityPolicy"] = None, @@ -24757,6 +25314,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. 
@@ -24775,6 +25339,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, **kwargs @@ -24944,6 +25510,13 @@ class ExecuteSSISPackageActivity(ExecutionActivity): # pylint: disable=too-many :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -24997,6 +25570,8 @@ class ExecuteSSISPackageActivity(ExecutionActivity): # pylint: disable=too-many "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -25029,6 +25604,8 @@ def __init__( connect_via: "_models.IntegrationRuntimeReference", additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -25053,6 +25630,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. 
:paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -25097,6 +25681,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -25132,6 +25718,13 @@ class ExecuteWranglingDataflowActivity(Activity): # pylint: disable=too-many-in :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -25177,6 +25770,8 @@ class ExecuteWranglingDataflowActivity(Activity): # pylint: disable=too-many-in "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "policy": {"key": "policy", "type": "ActivityPolicy"}, @@ -25199,6 +25794,8 @@ def __init__( data_flow: "_models.DataFlowReference", additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, policy: Optional["_models.ActivityPolicy"] = None, @@ -25221,6 +25818,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. 
@@ -25258,6 +25862,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, **kwargs @@ -26044,6 +26650,13 @@ class FailActivity(ControlActivity): :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -26070,6 +26683,8 @@ class FailActivity(ControlActivity): "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "message": {"key": "typeProperties.message", "type": "object"}, @@ -26084,6 +26699,8 @@ def __init__( error_code: JSON, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, **kwargs: Any @@ -26096,6 +26713,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. 
@@ -26113,6 +26737,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, **kwargs @@ -26149,9 +26775,8 @@ class FileServerLinkedService(LinkedService): :ivar password: Password to logon the server. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -26169,7 +26794,7 @@ class FileServerLinkedService(LinkedService): "host": {"key": "typeProperties.host", "type": "object"}, "user_id": {"key": "typeProperties.userId", "type": "object"}, "password": {"key": "typeProperties.password", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -26183,7 +26808,7 @@ def __init__( annotations: Optional[List[JSON]] = None, user_id: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -26207,9 +26832,8 @@ def __init__( :keyword password: Password to logon the server. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). 
- :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -26309,8 +26933,9 @@ class FileServerReadSettings(StoreReadSettings): # pylint: disable=too-many-ins configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). :vartype file_list_path: JSON - :ivar enable_partition_discovery: Indicates whether to enable partition discovery. - :vartype enable_partition_discovery: bool + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). + :vartype enable_partition_discovery: JSON :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :vartype partition_root_path: JSON @@ -26341,7 +26966,7 @@ class FileServerReadSettings(StoreReadSettings): # pylint: disable=too-many-ins "wildcard_folder_path": {"key": "wildcardFolderPath", "type": "object"}, "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, "file_list_path": {"key": "fileListPath", "type": "object"}, - "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "object"}, "partition_root_path": {"key": "partitionRootPath", "type": "object"}, "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, "modified_datetime_start": {"key": "modifiedDatetimeStart", "type": "object"}, @@ -26359,7 +26984,7 @@ def __init__( wildcard_folder_path: Optional[JSON] = None, wildcard_file_name: Optional[JSON] = None, file_list_path: Optional[JSON] = None, - enable_partition_discovery: Optional[bool] = None, + enable_partition_discovery: Optional[JSON] = None, partition_root_path: Optional[JSON] = None, 
delete_files_after_completion: Optional[JSON] = None, modified_datetime_start: Optional[JSON] = None, @@ -26390,8 +27015,9 @@ def __init__( configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). :paramtype file_list_path: JSON - :keyword enable_partition_discovery: Indicates whether to enable partition discovery. - :paramtype enable_partition_discovery: bool + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). + :paramtype enable_partition_discovery: JSON :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :paramtype partition_root_path: JSON @@ -26852,6 +27478,13 @@ class FilterActivity(ControlActivity): :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -26874,6 +27507,8 @@ class FilterActivity(ControlActivity): "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "items": {"key": "typeProperties.items", "type": "Expression"}, @@ -26888,6 +27523,8 @@ def __init__( condition: "_models.Expression", additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, **kwargs: Any @@ -26900,6 +27537,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. 
@@ -26913,6 +27557,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, **kwargs @@ -27005,7 +27651,7 @@ def __init__( self.script_lines = script_lines -class ForEachActivity(ControlActivity): +class ForEachActivity(ControlActivity): # pylint: disable=too-many-instance-attributes """This activity is used for iterating over a collection and execute given activities. All required parameters must be populated in order to send to Azure. @@ -27019,6 +27665,13 @@ class ForEachActivity(ControlActivity): :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -27047,6 +27700,8 @@ class ForEachActivity(ControlActivity): "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "is_sequential": {"key": "typeProperties.isSequential", "type": "bool"}, @@ -27063,6 +27718,8 @@ def __init__( activities: List["_models.Activity"], additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, is_sequential: Optional[bool] = None, @@ -27077,6 +27734,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. 
@@ -27095,6 +27759,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, **kwargs @@ -27131,8 +27797,9 @@ class FtpReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-a :ivar wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType string). :vartype wildcard_file_name: JSON - :ivar enable_partition_discovery: Indicates whether to enable partition discovery. - :vartype enable_partition_discovery: bool + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). + :vartype enable_partition_discovery: JSON :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :vartype partition_root_path: JSON @@ -27143,8 +27810,9 @@ class FtpReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-a configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). :vartype file_list_path: JSON - :ivar use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. - :vartype use_binary_transfer: bool + :ivar use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. Type: + boolean (or Expression with resultType boolean). + :vartype use_binary_transfer: JSON :ivar disable_chunking: If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with resultType boolean). 
:vartype disable_chunking: JSON @@ -27162,11 +27830,11 @@ class FtpReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-a "recursive": {"key": "recursive", "type": "object"}, "wildcard_folder_path": {"key": "wildcardFolderPath", "type": "object"}, "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, - "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "object"}, "partition_root_path": {"key": "partitionRootPath", "type": "object"}, "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, "file_list_path": {"key": "fileListPath", "type": "object"}, - "use_binary_transfer": {"key": "useBinaryTransfer", "type": "bool"}, + "use_binary_transfer": {"key": "useBinaryTransfer", "type": "object"}, "disable_chunking": {"key": "disableChunking", "type": "object"}, } @@ -27179,11 +27847,11 @@ def __init__( recursive: Optional[JSON] = None, wildcard_folder_path: Optional[JSON] = None, wildcard_file_name: Optional[JSON] = None, - enable_partition_discovery: Optional[bool] = None, + enable_partition_discovery: Optional[JSON] = None, partition_root_path: Optional[JSON] = None, delete_files_after_completion: Optional[JSON] = None, file_list_path: Optional[JSON] = None, - use_binary_transfer: Optional[bool] = None, + use_binary_transfer: Optional[JSON] = None, disable_chunking: Optional[JSON] = None, **kwargs: Any ) -> None: @@ -27206,8 +27874,9 @@ def __init__( :keyword wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType string). :paramtype wildcard_file_name: JSON - :keyword enable_partition_discovery: Indicates whether to enable partition discovery. - :paramtype enable_partition_discovery: bool + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). 
+ :paramtype enable_partition_discovery: JSON :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :paramtype partition_root_path: JSON @@ -27218,8 +27887,9 @@ def __init__( configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). :paramtype file_list_path: JSON - :keyword use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. - :paramtype use_binary_transfer: bool + :keyword use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. Type: + boolean (or Expression with resultType boolean). + :paramtype use_binary_transfer: JSON :keyword disable_chunking: If true, disable parallel reading within each file. Default is false. Type: boolean (or Expression with resultType boolean). :paramtype disable_chunking: JSON @@ -27275,9 +27945,8 @@ class FtpServerLinkedService(LinkedService): # pylint: disable=too-many-instanc :ivar password: Password to logon the FTP server. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str :ivar enable_ssl: If true, connect to the FTP server over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). 
:vartype enable_ssl: JSON @@ -27304,7 +27973,7 @@ class FtpServerLinkedService(LinkedService): # pylint: disable=too-many-instanc "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, "user_name": {"key": "typeProperties.userName", "type": "object"}, "password": {"key": "typeProperties.password", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, "enable_ssl": {"key": "typeProperties.enableSsl", "type": "object"}, "enable_server_certificate_validation": { "key": "typeProperties.enableServerCertificateValidation", @@ -27325,7 +27994,7 @@ def __init__( authentication_type: Optional[Union[str, "_models.FtpAuthenticationType"]] = None, user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, enable_ssl: Optional[JSON] = None, enable_server_certificate_validation: Optional[JSON] = None, **kwargs: Any @@ -27357,9 +28026,8 @@ def __init__( :keyword password: Password to logon the FTP server. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str :keyword enable_ssl: If true, connect to the FTP server over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). :paramtype enable_ssl: JSON @@ -27485,6 +28153,13 @@ class GetMetadataActivity(ExecutionActivity): # pylint: disable=too-many-instan :vartype type: str :ivar description: Activity description. 
:vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. @@ -27514,6 +28189,8 @@ class GetMetadataActivity(ExecutionActivity): # pylint: disable=too-many-instan "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -27531,6 +28208,8 @@ def __init__( dataset: "_models.DatasetReference", additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -27548,6 +28227,13 @@ def __init__( :paramtype name: str :keyword description: Activity 
description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -27569,6 +28255,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -27834,11 +28522,8 @@ class GoogleAdWordsLinkedService(LinkedService): # pylint: disable=too-many-ins :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. :vartype annotations: list[JSON] - :ivar connection_properties: Properties used to connect to GoogleAds. It is mutually exclusive - with any other properties in the linked service. Type: object. - :vartype connection_properties: JSON :ivar client_customer_id: The Client customer ID of the AdWords account that you want to fetch - report data for. + report data for. Type: string (or Expression with resultType string). :vartype client_customer_id: JSON :ivar developer_token: The developer token associated with the manager account that you use to grant access to the AdWords API. 
@@ -27858,22 +28543,28 @@ class GoogleAdWordsLinkedService(LinkedService): # pylint: disable=too-many-ins token. :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase :ivar email: The service account email ID that is used for ServiceAuthentication and can only - be used on self-hosted IR. + be used on self-hosted IR. Type: string (or Expression with resultType string). :vartype email: JSON - :ivar key_file_path: The full path to the .p12 key file that is used to authenticate the - service account email address and can only be used on self-hosted IR. - :vartype key_file_path: JSON - :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for - verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :vartype trusted_cert_path: JSON - :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. - :vartype use_system_trust_store: JSON + :ivar private_key: The private key that is used to authenticate the service account email + address and can only be used on self-hosted IR. + :vartype private_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar login_customer_id: The customer ID of the Google Ads Manager account through which you + want to fetch report data of specific Customer. Type: string (or Expression with resultType + string). + :vartype login_customer_id: JSON + :ivar google_ads_api_version: The Google Ads API major version such as v14. The supported major + versions could be found on + https://developers.google.com/google-ads/api/docs/sunset-dates#timetable. Type: string (or + Expression with resultType string). 
+ :vartype google_ads_api_version: JSON + :ivar support_legacy_data_types: Specifies whether to use the legacy data type mappings, which + maps float, int32 and int64 from Google to string. Do not set this to true unless you want to + keep backward compatibility with legacy driver's data type mappings. Type: boolean (or + Expression with resultType boolean). + :vartype support_legacy_data_types: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -27887,7 +28578,6 @@ class GoogleAdWordsLinkedService(LinkedService): # pylint: disable=too-many-ins "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, "annotations": {"key": "annotations", "type": "[object]"}, - "connection_properties": {"key": "typeProperties.connectionProperties", "type": "object"}, "client_customer_id": {"key": "typeProperties.clientCustomerID", "type": "object"}, "developer_token": {"key": "typeProperties.developerToken", "type": "SecretBase"}, "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, @@ -27895,10 +28585,11 @@ class GoogleAdWordsLinkedService(LinkedService): # pylint: disable=too-many-ins "client_id": {"key": "typeProperties.clientId", "type": "object"}, "client_secret": {"key": "typeProperties.clientSecret", "type": "SecretBase"}, "email": {"key": "typeProperties.email", "type": "object"}, - "key_file_path": {"key": "typeProperties.keyFilePath", "type": "object"}, - "trusted_cert_path": {"key": "typeProperties.trustedCertPath", "type": "object"}, - "use_system_trust_store": {"key": "typeProperties.useSystemTrustStore", "type": "object"}, - 
"encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "private_key": {"key": "typeProperties.privateKey", "type": "SecretBase"}, + "login_customer_id": {"key": "typeProperties.loginCustomerID", "type": "object"}, + "google_ads_api_version": {"key": "typeProperties.googleAdsApiVersion", "type": "object"}, + "support_legacy_data_types": {"key": "typeProperties.supportLegacyDataTypes", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -27909,7 +28600,6 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, - connection_properties: Optional[JSON] = None, client_customer_id: Optional[JSON] = None, developer_token: Optional["_models.SecretBase"] = None, authentication_type: Optional[Union[str, "_models.GoogleAdWordsAuthenticationType"]] = None, @@ -27917,10 +28607,11 @@ def __init__( client_id: Optional[JSON] = None, client_secret: Optional["_models.SecretBase"] = None, email: Optional[JSON] = None, - key_file_path: Optional[JSON] = None, - trusted_cert_path: Optional[JSON] = None, - use_system_trust_store: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + private_key: Optional["_models.SecretBase"] = None, + login_customer_id: Optional[JSON] = None, + google_ads_api_version: Optional[JSON] = None, + support_legacy_data_types: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -27935,11 +28626,8 @@ def __init__( :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. :paramtype annotations: list[JSON] - :keyword connection_properties: Properties used to connect to GoogleAds. It is mutually - exclusive with any other properties in the linked service. 
Type: object. - :paramtype connection_properties: JSON :keyword client_customer_id: The Client customer ID of the AdWords account that you want to - fetch report data for. + fetch report data for. Type: string (or Expression with resultType string). :paramtype client_customer_id: JSON :keyword developer_token: The developer token associated with the manager account that you use to grant access to the AdWords API. @@ -27959,22 +28647,28 @@ def __init__( token. :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase :keyword email: The service account email ID that is used for ServiceAuthentication and can - only be used on self-hosted IR. + only be used on self-hosted IR. Type: string (or Expression with resultType string). :paramtype email: JSON - :keyword key_file_path: The full path to the .p12 key file that is used to authenticate the - service account email address and can only be used on self-hosted IR. - :paramtype key_file_path: JSON - :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates - for verifying the server when connecting over SSL. This property can only be set when using SSL - on self-hosted IR. The default value is the cacerts.pem file installed with the IR. - :paramtype trusted_cert_path: JSON - :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system - trust store or from a specified PEM file. The default value is false. - :paramtype use_system_trust_store: JSON + :keyword private_key: The private key that is used to authenticate the service account email + address and can only be used on self-hosted IR. + :paramtype private_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword login_customer_id: The customer ID of the Google Ads Manager account through which you + want to fetch report data of specific Customer. Type: string (or Expression with resultType + string). 
+ :paramtype login_customer_id: JSON + :keyword google_ads_api_version: The Google Ads API major version such as v14. The supported + major versions could be found on + https://developers.google.com/google-ads/api/docs/sunset-dates#timetable. Type: string (or + Expression with resultType string). + :paramtype google_ads_api_version: JSON + :keyword support_legacy_data_types: Specifies whether to use the legacy data type mappings, + which maps float, int32 and int64 from Google to string. Do not set this to true unless you + want to keep backward compatibility with legacy driver's data type mappings. Type: boolean (or + Expression with resultType boolean). + :paramtype support_legacy_data_types: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. 
+ :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -27985,7 +28679,6 @@ def __init__( **kwargs ) self.type: str = "GoogleAdWords" - self.connection_properties = connection_properties self.client_customer_id = client_customer_id self.developer_token = developer_token self.authentication_type = authentication_type @@ -27993,9 +28686,10 @@ def __init__( self.client_id = client_id self.client_secret = client_secret self.email = email - self.key_file_path = key_file_path - self.trusted_cert_path = trusted_cert_path - self.use_system_trust_store = use_system_trust_store + self.private_key = private_key + self.login_customer_id = login_customer_id + self.google_ads_api_version = google_ads_api_version + self.support_legacy_data_types = support_legacy_data_types self.encrypted_credential = encrypted_credential @@ -28221,13 +28915,15 @@ class GoogleBigQueryLinkedService(LinkedService): # pylint: disable=too-many-in :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. :vartype annotations: list[JSON] - :ivar project: The default BigQuery project to query against. Required. + :ivar project: The default BigQuery project to query against. Type: string (or Expression with + resultType string). Required. :vartype project: JSON - :ivar additional_projects: A comma-separated list of public BigQuery projects to access. + :ivar additional_projects: A comma-separated list of public BigQuery projects to access. Type: + string (or Expression with resultType string). :vartype additional_projects: JSON :ivar request_google_drive_scope: Whether to request access to Google Drive. Allowing Google Drive access enables support for federated tables that combine BigQuery data with data from - Google Drive. The default value is false. + Google Drive. The default value is false. 
Type: string (or Expression with resultType string). :vartype request_google_drive_scope: JSON :ivar authentication_type: The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. Required. Known values are: @@ -28244,22 +28940,24 @@ class GoogleBigQueryLinkedService(LinkedService): # pylint: disable=too-many-in token. :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase :ivar email: The service account email ID that is used for ServiceAuthentication and can only - be used on self-hosted IR. + be used on self-hosted IR. Type: string (or Expression with resultType string). :vartype email: JSON :ivar key_file_path: The full path to the .p12 key file that is used to authenticate the - service account email address and can only be used on self-hosted IR. + service account email address and can only be used on self-hosted IR. Type: string (or + Expression with resultType string). :vartype key_file_path: JSON :ivar trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on - self-hosted IR. The default value is the cacerts.pem file installed with the IR. + self-hosted IR. The default value is the cacerts.pem file installed with the IR. Type: string + (or Expression with resultType string). :vartype trusted_cert_path: JSON :ivar use_system_trust_store: Specifies whether to use a CA certificate from the system trust - store or from a specified PEM file. The default value is false. + store or from a specified PEM file. The default value is false.Type: boolean (or Expression + with resultType boolean). :vartype use_system_trust_store: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -28286,7 +28984,7 @@ class GoogleBigQueryLinkedService(LinkedService): # pylint: disable=too-many-in "key_file_path": {"key": "typeProperties.keyFilePath", "type": "object"}, "trusted_cert_path": {"key": "typeProperties.trustedCertPath", "type": "object"}, "use_system_trust_store": {"key": "typeProperties.useSystemTrustStore", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -28308,7 +29006,7 @@ def __init__( key_file_path: Optional[JSON] = None, trusted_cert_path: Optional[JSON] = None, use_system_trust_store: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -28323,13 +29021,15 @@ def __init__( :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. :paramtype annotations: list[JSON] - :keyword project: The default BigQuery project to query against. Required. + :keyword project: The default BigQuery project to query against. Type: string (or Expression + with resultType string). Required. :paramtype project: JSON :keyword additional_projects: A comma-separated list of public BigQuery projects to access. + Type: string (or Expression with resultType string). :paramtype additional_projects: JSON :keyword request_google_drive_scope: Whether to request access to Google Drive. Allowing Google Drive access enables support for federated tables that combine BigQuery data with data from - Google Drive. The default value is false. + Google Drive. The default value is false. 
Type: string (or Expression with resultType string). :paramtype request_google_drive_scope: JSON :keyword authentication_type: The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. Required. Known values are: @@ -28346,22 +29046,24 @@ def __init__( token. :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase :keyword email: The service account email ID that is used for ServiceAuthentication and can - only be used on self-hosted IR. + only be used on self-hosted IR. Type: string (or Expression with resultType string). :paramtype email: JSON :keyword key_file_path: The full path to the .p12 key file that is used to authenticate the - service account email address and can only be used on self-hosted IR. + service account email address and can only be used on self-hosted IR. Type: string (or + Expression with resultType string). :paramtype key_file_path: JSON :keyword trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL - on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + on self-hosted IR. The default value is the cacerts.pem file installed with the IR. Type: + string (or Expression with resultType string). :paramtype trusted_cert_path: JSON :keyword use_system_trust_store: Specifies whether to use a CA certificate from the system - trust store or from a specified PEM file. The default value is false. + trust store or from a specified PEM file. The default value is false.Type: boolean (or + Expression with resultType boolean). :paramtype use_system_trust_store: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). 
- :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -28640,9 +29342,8 @@ class GoogleCloudStorageLinkedService(LinkedService): string). :vartype service_url: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -28659,7 +29360,7 @@ class GoogleCloudStorageLinkedService(LinkedService): "access_key_id": {"key": "typeProperties.accessKeyId", "type": "object"}, "secret_access_key": {"key": "typeProperties.secretAccessKey", "type": "SecretBase"}, "service_url": {"key": "typeProperties.serviceUrl", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -28673,7 +29374,7 @@ def __init__( access_key_id: Optional[JSON] = None, secret_access_key: Optional["_models.SecretBase"] = None, service_url: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -28700,9 +29401,8 @@ def __init__( string). :paramtype service_url: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. 
+ :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -28823,8 +29523,9 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): # pylint: disable=too- configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). :vartype file_list_path: JSON - :ivar enable_partition_discovery: Indicates whether to enable partition discovery. - :vartype enable_partition_discovery: bool + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). + :vartype enable_partition_discovery: JSON :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :vartype partition_root_path: JSON @@ -28853,7 +29554,7 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): # pylint: disable=too- "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, "prefix": {"key": "prefix", "type": "object"}, "file_list_path": {"key": "fileListPath", "type": "object"}, - "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "object"}, "partition_root_path": {"key": "partitionRootPath", "type": "object"}, "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, "modified_datetime_start": {"key": "modifiedDatetimeStart", "type": "object"}, @@ -28871,7 +29572,7 @@ def __init__( wildcard_file_name: Optional[JSON] = None, prefix: Optional[JSON] = None, file_list_path: Optional[JSON] = None, - enable_partition_discovery: Optional[bool] = None, + enable_partition_discovery: Optional[JSON] = None, partition_root_path: Optional[JSON] = None, delete_files_after_completion: Optional[JSON] = None, modified_datetime_start: Optional[JSON] = None, @@ -28904,8 +29605,9 @@ def __init__( configured in the dataset) that 
you want to copy. Type: string (or Expression with resultType string). :paramtype file_list_path: JSON - :keyword enable_partition_discovery: Indicates whether to enable partition discovery. - :paramtype enable_partition_discovery: bool + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). + :paramtype enable_partition_discovery: JSON :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :paramtype partition_root_path: JSON @@ -28959,9 +29661,8 @@ class GoogleSheetsLinkedService(LinkedService): :ivar api_token: The api token for the GoogleSheets source. Required. :vartype api_token: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -28977,7 +29678,7 @@ class GoogleSheetsLinkedService(LinkedService): "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, "annotations": {"key": "annotations", "type": "[object]"}, "api_token": {"key": "typeProperties.apiToken", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -28989,7 +29690,7 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -29007,9 +29708,8 @@ def __init__( :keyword api_token: The api token for the GoogleSheets source. Required. :paramtype api_token: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -29048,9 +29748,8 @@ class GreenplumLinkedService(LinkedService): :ivar pwd: The Azure key vault secret reference of password in connection string. :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. 
Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -29066,7 +29765,7 @@ class GreenplumLinkedService(LinkedService): "annotations": {"key": "annotations", "type": "[object]"}, "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, "pwd": {"key": "typeProperties.pwd", "type": "AzureKeyVaultSecretReference"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -29079,7 +29778,7 @@ def __init__( annotations: Optional[List[JSON]] = None, connection_string: Optional[JSON] = None, pwd: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -29100,9 +29799,8 @@ def __init__( :keyword pwd: The Azure key vault secret reference of password in connection string. :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -29388,9 +30086,8 @@ class HBaseLinkedService(LinkedService): # pylint: disable=too-many-instance-at the server. The default value is false. :vartype allow_self_signed_server_cert: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. 
Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -29416,7 +30113,7 @@ class HBaseLinkedService(LinkedService): # pylint: disable=too-many-instance-at "trusted_cert_path": {"key": "typeProperties.trustedCertPath", "type": "object"}, "allow_host_name_cn_mismatch": {"key": "typeProperties.allowHostNameCNMismatch", "type": "object"}, "allow_self_signed_server_cert": {"key": "typeProperties.allowSelfSignedServerCert", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -29437,7 +30134,7 @@ def __init__( trusted_cert_path: Optional[JSON] = None, allow_host_name_cn_mismatch: Optional[JSON] = None, allow_self_signed_server_cert: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -29482,9 +30179,8 @@ def __init__( from the server. The default value is false. :paramtype allow_self_signed_server_cert: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -29737,9 +30433,8 @@ class HdfsLinkedService(LinkedService): # pylint: disable=too-many-instance-att are: Anonymous and Windows. Type: string (or Expression with resultType string). :vartype authentication_type: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str :ivar user_name: User name for Windows authentication. Type: string (or Expression with resultType string). :vartype user_name: JSON @@ -29761,7 +30456,7 @@ class HdfsLinkedService(LinkedService): # pylint: disable=too-many-instance-att "annotations": {"key": "annotations", "type": "[object]"}, "url": {"key": "typeProperties.url", "type": "object"}, "authentication_type": {"key": "typeProperties.authenticationType", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, "user_name": {"key": "typeProperties.userName", "type": "object"}, "password": {"key": "typeProperties.password", "type": "SecretBase"}, } @@ -29776,7 +30471,7 @@ def __init__( parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, authentication_type: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, **kwargs: Any @@ -29800,9 +30495,8 @@ def __init__( values are: Anonymous and Windows. Type: string (or Expression with resultType string). :paramtype authentication_type: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str :keyword user_name: User name for Windows authentication. Type: string (or Expression with resultType string). 
:paramtype user_name: JSON @@ -29908,8 +30602,9 @@ class HdfsReadSettings(StoreReadSettings): # pylint: disable=too-many-instance- configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). :vartype file_list_path: JSON - :ivar enable_partition_discovery: Indicates whether to enable partition discovery. - :vartype enable_partition_discovery: bool + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). + :vartype enable_partition_discovery: JSON :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :vartype partition_root_path: JSON @@ -29939,7 +30634,7 @@ class HdfsReadSettings(StoreReadSettings): # pylint: disable=too-many-instance- "wildcard_folder_path": {"key": "wildcardFolderPath", "type": "object"}, "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, "file_list_path": {"key": "fileListPath", "type": "object"}, - "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "object"}, "partition_root_path": {"key": "partitionRootPath", "type": "object"}, "modified_datetime_start": {"key": "modifiedDatetimeStart", "type": "object"}, "modified_datetime_end": {"key": "modifiedDatetimeEnd", "type": "object"}, @@ -29957,7 +30652,7 @@ def __init__( wildcard_folder_path: Optional[JSON] = None, wildcard_file_name: Optional[JSON] = None, file_list_path: Optional[JSON] = None, - enable_partition_discovery: Optional[bool] = None, + enable_partition_discovery: Optional[JSON] = None, partition_root_path: Optional[JSON] = None, modified_datetime_start: Optional[JSON] = None, modified_datetime_end: Optional[JSON] = None, @@ -29988,8 +30683,9 @@ def __init__( configured in the dataset) that you want to copy. 
Type: string (or Expression with resultType string). :paramtype file_list_path: JSON - :keyword enable_partition_discovery: Indicates whether to enable partition discovery. - :paramtype enable_partition_discovery: bool + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). + :paramtype enable_partition_discovery: JSON :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :paramtype partition_root_path: JSON @@ -30129,6 +30825,13 @@ class HDInsightHiveActivity(ExecutionActivity): # pylint: disable=too-many-inst :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. @@ -30150,7 +30853,7 @@ class HDInsightHiveActivity(ExecutionActivity): # pylint: disable=too-many-inst :ivar defines: Allows user to specify defines for Hive job request. :vartype defines: dict[str, JSON] :ivar variables: User specified arguments under hivevar namespace. - :vartype variables: list[JSON] + :vartype variables: dict[str, JSON] :ivar query_timeout: Query timeout value (in minutes). 
Effective when the HDInsight cluster is with ESP (Enterprise Security Package). :vartype query_timeout: int @@ -30166,6 +30869,8 @@ class HDInsightHiveActivity(ExecutionActivity): # pylint: disable=too-many-inst "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -30176,7 +30881,7 @@ class HDInsightHiveActivity(ExecutionActivity): # pylint: disable=too-many-inst "script_path": {"key": "typeProperties.scriptPath", "type": "object"}, "script_linked_service": {"key": "typeProperties.scriptLinkedService", "type": "LinkedServiceReference"}, "defines": {"key": "typeProperties.defines", "type": "{object}"}, - "variables": {"key": "typeProperties.variables", "type": "[object]"}, + "variables": {"key": "typeProperties.variables", "type": "{object}"}, "query_timeout": {"key": "typeProperties.queryTimeout", "type": "int"}, } @@ -30186,6 +30891,8 @@ def __init__( name: str, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -30196,7 +30903,7 @@ def __init__( script_path: Optional[JSON] = None, script_linked_service: Optional["_models.LinkedServiceReference"] = None, defines: Optional[Dict[str, JSON]] = None, - variables: Optional[List[JSON]] = None, + variables: 
Optional[Dict[str, JSON]] = None, query_timeout: Optional[int] = None, **kwargs: Any ) -> None: @@ -30208,6 +30915,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -30230,7 +30944,7 @@ def __init__( :keyword defines: Allows user to specify defines for Hive job request. :paramtype defines: dict[str, JSON] :keyword variables: User specified arguments under hivevar namespace. - :paramtype variables: list[JSON] + :paramtype variables: dict[str, JSON] :keyword query_timeout: Query timeout value (in minutes). Effective when the HDInsight cluster is with ESP (Enterprise Security Package). :paramtype query_timeout: int @@ -30239,6 +30953,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -30288,9 +31004,8 @@ class HDInsightLinkedService(LinkedService): # pylint: disable=too-many-instanc the HCatalog database. 
:vartype hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str :ivar is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security Package). Type: Boolean. :vartype is_esp_enabled: JSON @@ -30319,7 +31034,7 @@ class HDInsightLinkedService(LinkedService): # pylint: disable=too-many-instanc "key": "typeProperties.hcatalogLinkedServiceName", "type": "LinkedServiceReference", }, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, "is_esp_enabled": {"key": "typeProperties.isEspEnabled", "type": "object"}, "file_system": {"key": "typeProperties.fileSystem", "type": "object"}, } @@ -30337,7 +31052,7 @@ def __init__( password: Optional["_models.SecretBase"] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, hcatalog_linked_service_name: Optional["_models.LinkedServiceReference"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, is_esp_enabled: Optional[JSON] = None, file_system: Optional[JSON] = None, **kwargs: Any @@ -30368,9 +31083,8 @@ def __init__( to the HCatalog database. :paramtype hcatalog_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). 
- :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str :keyword is_esp_enabled: Specify if the HDInsight is created with ESP (Enterprise Security Package). Type: Boolean. :paramtype is_esp_enabled: JSON @@ -30411,6 +31125,13 @@ class HDInsightMapReduceActivity(ExecutionActivity): # pylint: disable=too-many :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -30449,6 +31170,8 @@ class HDInsightMapReduceActivity(ExecutionActivity): # pylint: disable=too-many "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -30471,6 +31194,8 @@ def __init__( jar_file_path: JSON, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -30491,6 +31216,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. 
:paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -30522,6 +31254,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -30636,9 +31370,8 @@ class HDInsightOnDemandLinkedService(LinkedService): # pylint: disable=too-many HDInsight cluster. :vartype yarn_configuration: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str :ivar head_node_size: Specifies the size of the head node for the HDInsight cluster. :vartype head_node_size: JSON :ivar data_node_size: Specifies the size of the data node for the HDInsight cluster. 
@@ -30709,7 +31442,7 @@ class HDInsightOnDemandLinkedService(LinkedService): # pylint: disable=too-many "oozie_configuration": {"key": "typeProperties.oozieConfiguration", "type": "object"}, "storm_configuration": {"key": "typeProperties.stormConfiguration", "type": "object"}, "yarn_configuration": {"key": "typeProperties.yarnConfiguration", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, "head_node_size": {"key": "typeProperties.headNodeSize", "type": "object"}, "data_node_size": {"key": "typeProperties.dataNodeSize", "type": "object"}, "zookeeper_node_size": {"key": "typeProperties.zookeeperNodeSize", "type": "object"}, @@ -30753,7 +31486,7 @@ def __init__( # pylint: disable=too-many-locals oozie_configuration: Optional[JSON] = None, storm_configuration: Optional[JSON] = None, yarn_configuration: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, head_node_size: Optional[JSON] = None, data_node_size: Optional[JSON] = None, zookeeper_node_size: Optional[JSON] = None, @@ -30854,9 +31587,8 @@ def __init__( # pylint: disable=too-many-locals the HDInsight cluster. :paramtype yarn_configuration: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str :keyword head_node_size: Specifies the size of the head node for the HDInsight cluster. :paramtype head_node_size: JSON :keyword data_node_size: Specifies the size of the data node for the HDInsight cluster. 
@@ -30936,6 +31668,13 @@ class HDInsightPigActivity(ExecutionActivity): # pylint: disable=too-many-insta :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. @@ -30969,6 +31708,8 @@ class HDInsightPigActivity(ExecutionActivity): # pylint: disable=too-many-insta "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -30987,6 +31728,8 @@ def __init__( name: str, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: 
Optional["_models.LinkedServiceReference"] = None, @@ -31007,6 +31750,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -31034,6 +31784,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -31063,6 +31815,13 @@ class HDInsightSparkActivity(ExecutionActivity): # pylint: disable=too-many-ins :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". 
+ :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. @@ -31105,6 +31864,8 @@ class HDInsightSparkActivity(ExecutionActivity): # pylint: disable=too-many-ins "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -31127,6 +31888,8 @@ def __init__( entry_file_path: JSON, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -31147,6 +31910,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. 
Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -31181,6 +31951,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -31212,6 +31984,13 @@ class HDInsightStreamingActivity(ExecutionActivity): # pylint: disable=too-many :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -31263,6 +32042,8 @@ class HDInsightStreamingActivity(ExecutionActivity): # pylint: disable=too-many "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -31292,6 +32073,8 @@ def __init__( file_paths: List[JSON], additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -31313,6 +32096,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. 
:paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -31355,6 +32145,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -31442,9 +32234,8 @@ class HiveLinkedService(LinkedService): # pylint: disable=too-many-instance-att the server. The default value is false. :vartype allow_self_signed_server_cert: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -31476,7 +32267,7 @@ class HiveLinkedService(LinkedService): # pylint: disable=too-many-instance-att "use_system_trust_store": {"key": "typeProperties.useSystemTrustStore", "type": "object"}, "allow_host_name_cn_mismatch": {"key": "typeProperties.allowHostNameCNMismatch", "type": "object"}, "allow_self_signed_server_cert": {"key": "typeProperties.allowSelfSignedServerCert", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -31503,7 +32294,7 @@ def __init__( use_system_trust_store: Optional[JSON] = None, allow_host_name_cn_mismatch: Optional[JSON] = None, allow_self_signed_server_cert: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -31566,9 +32357,8 @@ def __init__( from the server. The default value is false. 
:paramtype allow_self_signed_server_cert: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -32016,9 +32806,8 @@ class HttpLinkedService(LinkedService): # pylint: disable=too-many-instance-att with resultType string). :vartype cert_thumbprint: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str :ivar enable_server_certificate_validation: If true, validate the HTTPS server SSL certificate. Default value is true. Type: boolean (or Expression with resultType boolean). 
:vartype enable_server_certificate_validation: JSON @@ -32043,7 +32832,7 @@ class HttpLinkedService(LinkedService): # pylint: disable=too-many-instance-att "auth_headers": {"key": "typeProperties.authHeaders", "type": "object"}, "embedded_cert_data": {"key": "typeProperties.embeddedCertData", "type": "object"}, "cert_thumbprint": {"key": "typeProperties.certThumbprint", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, "enable_server_certificate_validation": { "key": "typeProperties.enableServerCertificateValidation", "type": "object", @@ -32065,7 +32854,7 @@ def __init__( auth_headers: Optional[JSON] = None, embedded_cert_data: Optional[JSON] = None, cert_thumbprint: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, enable_server_certificate_validation: Optional[JSON] = None, **kwargs: Any ) -> None: @@ -32107,9 +32896,8 @@ def __init__( with resultType string). :paramtype cert_thumbprint: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str :keyword enable_server_certificate_validation: If true, validate the HTTPS server SSL certificate. Default value is true. Type: boolean (or Expression with resultType boolean). :paramtype enable_server_certificate_validation: JSON @@ -32135,7 +32923,7 @@ def __init__( class HttpReadSettings(StoreReadSettings): - """Sftp read settings. + """Http read settings. All required parameters must be populated in order to send to Azure. 
@@ -32160,13 +32948,11 @@ class HttpReadSettings(StoreReadSettings): string (or Expression with resultType string). :vartype additional_headers: JSON :ivar request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP - server. + server. Type: string (or Expression with resultType string). :vartype request_timeout: JSON - :ivar enable_partition_discovery: Indicates whether to enable partition discovery. - :vartype enable_partition_discovery: bool - :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: - string (or Expression with resultType string). - :vartype partition_root_path: JSON + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :vartype additional_columns: JSON """ _validation = { @@ -32182,8 +32968,7 @@ class HttpReadSettings(StoreReadSettings): "request_body": {"key": "requestBody", "type": "object"}, "additional_headers": {"key": "additionalHeaders", "type": "object"}, "request_timeout": {"key": "requestTimeout", "type": "object"}, - "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, - "partition_root_path": {"key": "partitionRootPath", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( @@ -32196,8 +32981,7 @@ def __init__( request_body: Optional[JSON] = None, additional_headers: Optional[JSON] = None, request_timeout: Optional[JSON] = None, - enable_partition_discovery: Optional[bool] = None, - partition_root_path: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, **kwargs: Any ) -> None: """ @@ -32220,13 +33004,11 @@ def __init__( Type: string (or Expression with resultType string). :paramtype additional_headers: JSON :keyword request_timeout: Specifies the timeout for a HTTP client to get HTTP response from - HTTP server. + HTTP server. 
Type: string (or Expression with resultType string). :paramtype request_timeout: JSON - :keyword enable_partition_discovery: Indicates whether to enable partition discovery. - :paramtype enable_partition_discovery: bool - :keyword partition_root_path: Specify the root path where partition discovery starts from. - Type: string (or Expression with resultType string). - :paramtype partition_root_path: JSON + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: JSON """ super().__init__( additional_properties=additional_properties, @@ -32239,8 +33021,7 @@ def __init__( self.request_body = request_body self.additional_headers = additional_headers self.request_timeout = request_timeout - self.enable_partition_discovery = enable_partition_discovery - self.partition_root_path = partition_root_path + self.additional_columns = additional_columns class HttpServerLocation(DatasetLocation): @@ -32433,9 +33214,8 @@ class HubspotLinkedService(LinkedService): # pylint: disable=too-many-instance- connecting over SSL. The default value is true. :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -32457,7 +33237,7 @@ class HubspotLinkedService(LinkedService): # pylint: disable=too-many-instance- "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -32475,7 +33255,7 @@ def __init__( use_encrypted_endpoints: Optional[JSON] = None, use_host_verification: Optional[JSON] = None, use_peer_verification: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -32511,9 +33291,8 @@ def __init__( connecting over SSL. The default value is true. :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -32738,7 +33517,7 @@ def __init__( self.query = query -class IfConditionActivity(ControlActivity): +class IfConditionActivity(ControlActivity): # pylint: disable=too-many-instance-attributes """This activity evaluates a boolean expression and executes either the activities under the ifTrueActivities property or the ifFalseActivities property depending on the result of the expression. 
@@ -32754,6 +33533,13 @@ class IfConditionActivity(ControlActivity): :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. @@ -32780,6 +33566,8 @@ class IfConditionActivity(ControlActivity): "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "expression": {"key": "typeProperties.expression", "type": "Expression"}, @@ -32794,6 +33582,8 @@ def __init__( expression: "_models.Expression", additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, if_true_activities: Optional[List["_models.Activity"]] = None, @@ -32808,6 +33598,13 @@ def 
__init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -32828,6 +33625,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, **kwargs @@ -32886,9 +33685,8 @@ class ImpalaLinkedService(LinkedService): # pylint: disable=too-many-instance-a the server. The default value is false. :vartype allow_self_signed_server_cert: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -32914,7 +33712,7 @@ class ImpalaLinkedService(LinkedService): # pylint: disable=too-many-instance-a "use_system_trust_store": {"key": "typeProperties.useSystemTrustStore", "type": "object"}, "allow_host_name_cn_mismatch": {"key": "typeProperties.allowHostNameCNMismatch", "type": "object"}, "allow_self_signed_server_cert": {"key": "typeProperties.allowSelfSignedServerCert", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -32935,7 +33733,7 @@ def __init__( use_system_trust_store: Optional[JSON] = None, allow_host_name_cn_mismatch: Optional[JSON] = None, allow_self_signed_server_cert: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -32981,9 +33779,8 @@ def __init__( from the server. The default value is false. :paramtype allow_self_signed_server_cert: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -33249,8 +34046,8 @@ class InformixLinkedService(LinkedService): # pylint: disable=too-many-instance :ivar annotations: List of tags that can be used for describing the linked service. :vartype annotations: list[JSON] :ivar connection_string: The non-access credential portion of the connection string as well as - an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. - Required. + an optional encrypted credential. 
Type: string, or SecureString, or + AzureKeyVaultSecretReference, or Expression with resultType string. Required. :vartype connection_string: JSON :ivar authentication_type: Type of authentication used to connect to the Informix as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType @@ -33265,9 +34062,8 @@ class InformixLinkedService(LinkedService): # pylint: disable=too-many-instance :ivar password: Password for Basic authentication. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -33287,7 +34083,7 @@ class InformixLinkedService(LinkedService): # pylint: disable=too-many-instance "credential": {"key": "typeProperties.credential", "type": "SecretBase"}, "user_name": {"key": "typeProperties.userName", "type": "object"}, "password": {"key": "typeProperties.password", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -33303,7 +34099,7 @@ def __init__( credential: Optional["_models.SecretBase"] = None, user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -33319,8 +34115,8 @@ def __init__( :keyword annotations: List of tags that can be used for describing the linked service. 
:paramtype annotations: list[JSON] :keyword connection_string: The non-access credential portion of the connection string as well - as an optional encrypted credential. Type: string, SecureString or - AzureKeyVaultSecretReference. Required. + as an optional encrypted credential. Type: string, or SecureString, or + AzureKeyVaultSecretReference, or Expression with resultType string. Required. :paramtype connection_string: JSON :keyword authentication_type: Type of authentication used to connect to the Informix as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with @@ -33335,9 +34131,8 @@ def __init__( :keyword password: Password for Basic authentication. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -33977,6 +34772,9 @@ class IntegrationRuntimeDataFlowProperties(_serialization.Model): :ivar cleanup: Cluster will not be recycled and it will be used in next data flow activity run until TTL (time to live) is reached if this is set as false. Default is true. :vartype cleanup: bool + :ivar custom_properties: Custom properties are used to tune the data flow runtime performance. 
+ :vartype custom_properties: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem] """ _validation = { @@ -33989,6 +34787,10 @@ class IntegrationRuntimeDataFlowProperties(_serialization.Model): "core_count": {"key": "coreCount", "type": "int"}, "time_to_live": {"key": "timeToLive", "type": "int"}, "cleanup": {"key": "cleanup", "type": "bool"}, + "custom_properties": { + "key": "customProperties", + "type": "[IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem]", + }, } def __init__( @@ -33999,6 +34801,7 @@ def __init__( core_count: Optional[int] = None, time_to_live: Optional[int] = None, cleanup: Optional[bool] = None, + custom_properties: Optional[List["_models.IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem"]] = None, **kwargs: Any ) -> None: """ @@ -34017,6 +34820,10 @@ def __init__( :keyword cleanup: Cluster will not be recycled and it will be used in next data flow activity run until TTL (time to live) is reached if this is set as false. Default is true. :paramtype cleanup: bool + :keyword custom_properties: Custom properties are used to tune the data flow runtime + performance. + :paramtype custom_properties: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem] """ super().__init__(**kwargs) self.additional_properties = additional_properties @@ -34024,6 +34831,33 @@ def __init__( self.core_count = core_count self.time_to_live = time_to_live self.cleanup = cleanup + self.custom_properties = custom_properties + + +class IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem(_serialization.Model): + """IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem. + + :ivar name: Name of custom property. + :vartype name: str + :ivar value: Value of custom property. 
+ :vartype value: str + """ + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "value": {"key": "value", "type": "str"}, + } + + def __init__(self, *, name: Optional[str] = None, value: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword name: Name of custom property. + :paramtype name: str + :keyword value: Value of custom property. + :paramtype value: str + """ + super().__init__(**kwargs) + self.name = name + self.value = value class IntegrationRuntimeDataProxyProperties(_serialization.Model): @@ -34879,9 +35713,8 @@ class JiraLinkedService(LinkedService): # pylint: disable=too-many-instance-att connecting over SSL. The default value is true. :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -34904,7 +35737,7 @@ class JiraLinkedService(LinkedService): # pylint: disable=too-many-instance-att "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -34922,7 +35755,7 @@ def __init__( use_encrypted_endpoints: Optional[JSON] = None, use_host_verification: Optional[JSON] = None, use_peer_verification: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -34959,9 +35792,8 @@ def __init__( connecting over SSL. The default value is true. :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -35282,16 +36114,863 @@ def __init__( :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder - :keyword location: The location of the json data storage. - :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation - :keyword encoding_name: The code page name of the preferred encoding. 
If not specified, the - default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column - of the table in the following link to set supported values: - https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + :keyword location: The location of the json data storage. + :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation + :keyword encoding_name: The code page name of the preferred encoding. If not specified, the + default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column + of the table in the following link to set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + resultType string). + :paramtype encoding_name: JSON + :keyword compression: The data compression method used for the json dataset. + :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type: str = "Json" + self.location = location + self.encoding_name = encoding_name + self.compression = compression + + +class JsonFormat(DatasetStorageFormat): + """The data stored in JSON format. + + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset storage format. Required. + :vartype type: str + :ivar serializer: Serializer. Type: string (or Expression with resultType string). + :vartype serializer: JSON + :ivar deserializer: Deserializer. Type: string (or Expression with resultType string). 
+ :vartype deserializer: JSON + :ivar file_pattern: File pattern of JSON. To be more specific, the way of separating a + collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. + :vartype file_pattern: JSON + :ivar nesting_separator: The character used to separate nesting levels. Default value is '.' + (dot). Type: string (or Expression with resultType string). + :vartype nesting_separator: JSON + :ivar encoding_name: The code page name of the preferred encoding. If not provided, the default + value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. The full + list of supported values can be found in the 'Name' column of the table of encodings in the + following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or + Expression with resultType string). + :vartype encoding_name: JSON + :ivar json_node_reference: The JSONPath of the JSON array element to be flattened. Example: + "$.ArrayPath". Type: string (or Expression with resultType string). + :vartype json_node_reference: JSON + :ivar json_path_definition: The JSONPath definition for each column mapping with a customized + column name to extract data from JSON file. For fields under root object, start with "$"; for + fields inside the array chosen by jsonNodeReference property, start from the array element. + Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: object (or + Expression with resultType object). 
+ :vartype json_path_definition: JSON + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "serializer": {"key": "serializer", "type": "object"}, + "deserializer": {"key": "deserializer", "type": "object"}, + "file_pattern": {"key": "filePattern", "type": "object"}, + "nesting_separator": {"key": "nestingSeparator", "type": "object"}, + "encoding_name": {"key": "encodingName", "type": "object"}, + "json_node_reference": {"key": "jsonNodeReference", "type": "object"}, + "json_path_definition": {"key": "jsonPathDefinition", "type": "object"}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, JSON]] = None, + serializer: Optional[JSON] = None, + deserializer: Optional[JSON] = None, + file_pattern: Optional[JSON] = None, + nesting_separator: Optional[JSON] = None, + encoding_name: Optional[JSON] = None, + json_node_reference: Optional[JSON] = None, + json_path_definition: Optional[JSON] = None, + **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword serializer: Serializer. Type: string (or Expression with resultType string). + :paramtype serializer: JSON + :keyword deserializer: Deserializer. Type: string (or Expression with resultType string). + :paramtype deserializer: JSON + :keyword file_pattern: File pattern of JSON. To be more specific, the way of separating a + collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. + :paramtype file_pattern: JSON + :keyword nesting_separator: The character used to separate nesting levels. Default value is '.' + (dot). Type: string (or Expression with resultType string). + :paramtype nesting_separator: JSON + :keyword encoding_name: The code page name of the preferred encoding. 
If not provided, the + default value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. + The full list of supported values can be found in the 'Name' column of the table of encodings + in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or + Expression with resultType string). + :paramtype encoding_name: JSON + :keyword json_node_reference: The JSONPath of the JSON array element to be flattened. Example: + "$.ArrayPath". Type: string (or Expression with resultType string). + :paramtype json_node_reference: JSON + :keyword json_path_definition: The JSONPath definition for each column mapping with a + customized column name to extract data from JSON file. For fields under root object, start with + "$"; for fields inside the array chosen by jsonNodeReference property, start from the array + element. Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: object + (or Expression with resultType object). + :paramtype json_path_definition: JSON + """ + super().__init__( + additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs + ) + self.type: str = "JsonFormat" + self.file_pattern = file_pattern + self.nesting_separator = nesting_separator + self.encoding_name = encoding_name + self.json_node_reference = json_node_reference + self.json_path_definition = json_path_definition + + +class JsonReadSettings(FormatReadSettings): + """Json read settings. + + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: The read setting type. Required. + :vartype type: str + :ivar compression_properties: Compression settings. 
+ :vartype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "compression_properties": {"key": "compressionProperties", "type": "CompressionReadSettings"}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, JSON]] = None, + compression_properties: Optional["_models.CompressionReadSettings"] = None, + **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword compression_properties: Compression settings. + :paramtype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings + """ + super().__init__(additional_properties=additional_properties, **kwargs) + self.type: str = "JsonReadSettings" + self.compression_properties = compression_properties + + +class JsonSink(CopySink): + """A copy activity Json sink. + + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :vartype write_batch_size: JSON + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :vartype write_batch_timeout: JSON + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :vartype sink_retry_count: JSON + :ivar sink_retry_wait: Sink retry wait. 
Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :vartype sink_retry_wait: JSON + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :vartype max_concurrent_connections: JSON + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :vartype disable_metrics_collection: JSON + :ivar store_settings: Json store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :ivar format_settings: Json format settings. + :vartype format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "store_settings": {"key": "storeSettings", "type": "StoreWriteSettings"}, + "format_settings": {"key": "formatSettings", "type": "JsonWriteSettings"}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + store_settings: 
Optional["_models.StoreWriteSettings"] = None, + format_settings: Optional["_models.JsonWriteSettings"] = None, + **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: JSON + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: JSON + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: JSON + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: JSON + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: JSON + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: JSON + :keyword store_settings: Json store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :keyword format_settings: Json format settings. 
+ :paramtype format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type: str = "JsonSink" + self.store_settings = store_settings + self.format_settings = format_settings + + +class JsonSource(CopySource): + """A copy activity Json source. + + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :vartype source_retry_count: JSON + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :vartype source_retry_wait: JSON + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :vartype max_concurrent_connections: JSON + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :vartype disable_metrics_collection: JSON + :ivar store_settings: Json store settings. + :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :ivar format_settings: Json format settings. + :vartype format_settings: ~azure.mgmt.datafactory.models.JsonReadSettings + :ivar additional_columns: Specifies the additional columns to be added to source data. 
Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :vartype additional_columns: JSON + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "store_settings": {"key": "storeSettings", "type": "StoreReadSettings"}, + "format_settings": {"key": "formatSettings", "type": "JsonReadSettings"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + store_settings: Optional["_models.StoreReadSettings"] = None, + format_settings: Optional["_models.JsonReadSettings"] = None, + additional_columns: Optional[JSON] = None, + **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: JSON + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: JSON + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. 
Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: JSON + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: JSON + :keyword store_settings: Json store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :keyword format_settings: Json format settings. + :paramtype format_settings: ~azure.mgmt.datafactory.models.JsonReadSettings + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type: str = "JsonSource" + self.store_settings = store_settings + self.format_settings = format_settings + self.additional_columns = additional_columns + + +class JsonWriteSettings(FormatWriteSettings): + """Json write settings. + + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: The write setting type. Required. + :vartype type: str + :ivar file_pattern: File pattern of JSON. This setting controls the way a collection of JSON + objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. 
+ :vartype file_pattern: JSON + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "file_pattern": {"key": "filePattern", "type": "object"}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, JSON]] = None, + file_pattern: Optional[JSON] = None, + **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword file_pattern: File pattern of JSON. This setting controls the way a collection of JSON + objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. + :paramtype file_pattern: JSON + """ + super().__init__(additional_properties=additional_properties, **kwargs) + self.type: str = "JsonWriteSettings" + self.file_pattern = file_pattern + + +class LakeHouseLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes + """Microsoft Fabric LakeHouse linked service. + + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. + :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[JSON] + :ivar workspace_id: The ID of Microsoft Fabric workspace. 
Type: string (or Expression with + resultType string). + :vartype workspace_id: JSON + :ivar artifact_id: The ID of Microsoft Fabric LakeHouse artifact. Type: string (or Expression + with resultType string). + :vartype artifact_id: JSON + :ivar service_principal_id: The ID of the application used to authenticate against Microsoft + Fabric LakeHouse. Type: string (or Expression with resultType string). + :vartype service_principal_id: JSON + :ivar service_principal_key: The Key of the application used to authenticate against Microsoft + Fabric LakeHouse. + :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :ivar tenant: The name or ID of the tenant to which the service principal belongs. Type: string + (or Expression with resultType string). + :vartype tenant: JSON + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str + :ivar service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. Type: string (or Expression with resultType string). + :vartype service_principal_credential_type: JSON + :ivar service_principal_credential: The credential of the service principal object in Azure + Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', + servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If + servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only + be AzureKeyVaultSecretReference. 
+ :vartype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "workspace_id": {"key": "typeProperties.workspaceId", "type": "object"}, + "artifact_id": {"key": "typeProperties.artifactId", "type": "object"}, + "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, + "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, + "tenant": {"key": "typeProperties.tenant", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, + "service_principal_credential_type": {"key": "typeProperties.servicePrincipalCredentialType", "type": "object"}, + "service_principal_credential": {"key": "typeProperties.servicePrincipalCredential", "type": "SecretBase"}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, JSON]] = None, + connect_via: Optional["_models.IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, + annotations: Optional[List[JSON]] = None, + workspace_id: Optional[JSON] = None, + artifact_id: Optional[JSON] = None, + service_principal_id: Optional[JSON] = None, + service_principal_key: Optional["_models.SecretBase"] = None, + tenant: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, + service_principal_credential_type: Optional[JSON] = None, + service_principal_credential: Optional["_models.SecretBase"] = None, + **kwargs: Any + ) -> None: 
+ """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[JSON] + :keyword workspace_id: The ID of Microsoft Fabric workspace. Type: string (or Expression with + resultType string). + :paramtype workspace_id: JSON + :keyword artifact_id: The ID of Microsoft Fabric LakeHouse artifact. Type: string (or + Expression with resultType string). + :paramtype artifact_id: JSON + :keyword service_principal_id: The ID of the application used to authenticate against Microsoft + Fabric LakeHouse. Type: string (or Expression with resultType string). + :paramtype service_principal_id: JSON + :keyword service_principal_key: The Key of the application used to authenticate against + Microsoft Fabric LakeHouse. + :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :keyword tenant: The name or ID of the tenant to which the service principal belongs. Type: + string (or Expression with resultType string). + :paramtype tenant: JSON + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str + :keyword service_principal_credential_type: The service principal credential type to use in + Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' + for certificate. 
Type: string (or Expression with resultType string). + :paramtype service_principal_credential_type: JSON + :keyword service_principal_credential: The credential of the service principal object in Azure + Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', + servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If + servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only + be AzureKeyVaultSecretReference. + :paramtype service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type: str = "LakeHouse" + self.workspace_id = workspace_id + self.artifact_id = artifact_id + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.service_principal_credential_type = service_principal_credential_type + self.service_principal_credential = service_principal_credential + + +class LakeHouseLocation(DatasetLocation): + """The location of Microsoft Fabric LakeHouse Files dataset. + + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset storage location. Required. + :vartype type: str + :ivar folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :vartype folder_path: JSON + :ivar file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). 
+ :vartype file_name: JSON + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "folder_path": {"key": "folderPath", "type": "object"}, + "file_name": {"key": "fileName", "type": "object"}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, JSON]] = None, + folder_path: Optional[JSON] = None, + file_name: Optional[JSON] = None, + **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :paramtype folder_path: JSON + :keyword file_name: Specify the file name of dataset. Type: string (or Expression with + resultType string). + :paramtype file_name: JSON + """ + super().__init__( + additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs + ) + self.type: str = "LakeHouseLocation" + + +class LakeHouseReadSettings(StoreReadSettings): # pylint: disable=too-many-instance-attributes + """Microsoft Fabric LakeHouse Files read settings. + + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: The read setting type. Required. + :vartype type: str + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :vartype max_concurrent_connections: JSON + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :vartype disable_metrics_collection: JSON + :ivar recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :vartype recursive: JSON + :ivar wildcard_folder_path: Microsoft Fabric LakeHouse Files wildcardFolderPath. Type: string + (or Expression with resultType string). + :vartype wildcard_folder_path: JSON + :ivar wildcard_file_name: Microsoft Fabric LakeHouse Files wildcardFileName. Type: string (or + Expression with resultType string). + :vartype wildcard_file_name: JSON + :ivar file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :vartype file_list_path: JSON + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). + :vartype enable_partition_discovery: JSON + :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :vartype partition_root_path: JSON + :ivar delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :vartype delete_files_after_completion: JSON + :ivar modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :vartype modified_datetime_start: JSON + :ivar modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). 
+ :vartype modified_datetime_end: JSON + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "recursive": {"key": "recursive", "type": "object"}, + "wildcard_folder_path": {"key": "wildcardFolderPath", "type": "object"}, + "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, + "file_list_path": {"key": "fileListPath", "type": "object"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "object"}, + "partition_root_path": {"key": "partitionRootPath", "type": "object"}, + "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, + "modified_datetime_start": {"key": "modifiedDatetimeStart", "type": "object"}, + "modified_datetime_end": {"key": "modifiedDatetimeEnd", "type": "object"}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, JSON]] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + recursive: Optional[JSON] = None, + wildcard_folder_path: Optional[JSON] = None, + wildcard_file_name: Optional[JSON] = None, + file_list_path: Optional[JSON] = None, + enable_partition_discovery: Optional[JSON] = None, + partition_root_path: Optional[JSON] = None, + delete_files_after_completion: Optional[JSON] = None, + modified_datetime_start: Optional[JSON] = None, + modified_datetime_end: Optional[JSON] = None, + **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. 
Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: JSON + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: JSON + :keyword recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :paramtype recursive: JSON + :keyword wildcard_folder_path: Microsoft Fabric LakeHouse Files wildcardFolderPath. Type: + string (or Expression with resultType string). + :paramtype wildcard_folder_path: JSON + :keyword wildcard_file_name: Microsoft Fabric LakeHouse Files wildcardFileName. Type: string + (or Expression with resultType string). + :paramtype wildcard_file_name: JSON + :keyword file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :paramtype file_list_path: JSON + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). + :paramtype enable_partition_discovery: JSON + :keyword partition_root_path: Specify the root path where partition discovery starts from. + Type: string (or Expression with resultType string). + :paramtype partition_root_path: JSON + :keyword delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :paramtype delete_files_after_completion: JSON + :keyword modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :paramtype modified_datetime_start: JSON + :keyword modified_datetime_end: The end of file's modified datetime. 
Type: string (or + Expression with resultType string). + :paramtype modified_datetime_end: JSON + """ + super().__init__( + additional_properties=additional_properties, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type: str = "LakeHouseReadSettings" + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.file_list_path = file_list_path + self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path + self.delete_files_after_completion = delete_files_after_completion + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + + +class LakeHouseTableDataset(Dataset): + """Microsoft Fabric LakeHouse Table. + + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :vartype structure: JSON + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. 
+ :vartype annotations: list[JSON] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar table: The name of Microsoft Fabric LakeHouse Table. Type: string (or Expression with + resultType string). + :vartype table: JSON + """ + + _validation = { + "type": {"required": True}, + "linked_service_name": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "table": {"key": "typeProperties.table", "type": "object"}, + } + + def __init__( + self, + *, + linked_service_name: "_models.LinkedServiceReference", + additional_properties: Optional[Dict[str, JSON]] = None, + description: Optional[str] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, + parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, + annotations: Optional[List[JSON]] = None, + folder: Optional["_models.DatasetFolder"] = None, + table: Optional[JSON] = None, + **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. 
+ :paramtype structure: JSON + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[JSON] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword table: The name of Microsoft Fabric LakeHouse Table. Type: string (or Expression with resultType string). - :paramtype encoding_name: JSON - :keyword compression: The data compression method used for the json dataset. - :paramtype compression: ~azure.mgmt.datafactory.models.DatasetCompression + :paramtype table: JSON """ super().__init__( additional_properties=additional_properties, @@ -35304,164 +36983,12 @@ def __init__( folder=folder, **kwargs ) - self.type: str = "Json" - self.location = location - self.encoding_name = encoding_name - self.compression = compression - - -class JsonFormat(DatasetStorageFormat): - """The data stored in JSON format. - - All required parameters must be populated in order to send to Azure. - - :ivar additional_properties: Unmatched properties from the message are deserialized to this - collection. - :vartype additional_properties: dict[str, JSON] - :ivar type: Type of dataset storage format. Required. - :vartype type: str - :ivar serializer: Serializer. Type: string (or Expression with resultType string). - :vartype serializer: JSON - :ivar deserializer: Deserializer. Type: string (or Expression with resultType string). 
- :vartype deserializer: JSON - :ivar file_pattern: File pattern of JSON. To be more specific, the way of separating a - collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. - :vartype file_pattern: JSON - :ivar nesting_separator: The character used to separate nesting levels. Default value is '.' - (dot). Type: string (or Expression with resultType string). - :vartype nesting_separator: JSON - :ivar encoding_name: The code page name of the preferred encoding. If not provided, the default - value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. The full - list of supported values can be found in the 'Name' column of the table of encodings in the - following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or - Expression with resultType string). - :vartype encoding_name: JSON - :ivar json_node_reference: The JSONPath of the JSON array element to be flattened. Example: - "$.ArrayPath". Type: string (or Expression with resultType string). - :vartype json_node_reference: JSON - :ivar json_path_definition: The JSONPath definition for each column mapping with a customized - column name to extract data from JSON file. For fields under root object, start with "$"; for - fields inside the array chosen by jsonNodeReference property, start from the array element. - Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: object (or - Expression with resultType object). 
- :vartype json_path_definition: JSON - """ - - _validation = { - "type": {"required": True}, - } - - _attribute_map = { - "additional_properties": {"key": "", "type": "{object}"}, - "type": {"key": "type", "type": "str"}, - "serializer": {"key": "serializer", "type": "object"}, - "deserializer": {"key": "deserializer", "type": "object"}, - "file_pattern": {"key": "filePattern", "type": "object"}, - "nesting_separator": {"key": "nestingSeparator", "type": "object"}, - "encoding_name": {"key": "encodingName", "type": "object"}, - "json_node_reference": {"key": "jsonNodeReference", "type": "object"}, - "json_path_definition": {"key": "jsonPathDefinition", "type": "object"}, - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, JSON]] = None, - serializer: Optional[JSON] = None, - deserializer: Optional[JSON] = None, - file_pattern: Optional[JSON] = None, - nesting_separator: Optional[JSON] = None, - encoding_name: Optional[JSON] = None, - json_node_reference: Optional[JSON] = None, - json_path_definition: Optional[JSON] = None, - **kwargs: Any - ) -> None: - """ - :keyword additional_properties: Unmatched properties from the message are deserialized to this - collection. - :paramtype additional_properties: dict[str, JSON] - :keyword serializer: Serializer. Type: string (or Expression with resultType string). - :paramtype serializer: JSON - :keyword deserializer: Deserializer. Type: string (or Expression with resultType string). - :paramtype deserializer: JSON - :keyword file_pattern: File pattern of JSON. To be more specific, the way of separating a - collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. - :paramtype file_pattern: JSON - :keyword nesting_separator: The character used to separate nesting levels. Default value is '.' - (dot). Type: string (or Expression with resultType string). - :paramtype nesting_separator: JSON - :keyword encoding_name: The code page name of the preferred encoding. 
If not provided, the - default value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. - The full list of supported values can be found in the 'Name' column of the table of encodings - in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or - Expression with resultType string). - :paramtype encoding_name: JSON - :keyword json_node_reference: The JSONPath of the JSON array element to be flattened. Example: - "$.ArrayPath". Type: string (or Expression with resultType string). - :paramtype json_node_reference: JSON - :keyword json_path_definition: The JSONPath definition for each column mapping with a - customized column name to extract data from JSON file. For fields under root object, start with - "$"; for fields inside the array chosen by jsonNodeReference property, start from the array - element. Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: object - (or Expression with resultType object). - :paramtype json_path_definition: JSON - """ - super().__init__( - additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs - ) - self.type: str = "JsonFormat" - self.file_pattern = file_pattern - self.nesting_separator = nesting_separator - self.encoding_name = encoding_name - self.json_node_reference = json_node_reference - self.json_path_definition = json_path_definition - - -class JsonReadSettings(FormatReadSettings): - """Json read settings. - - All required parameters must be populated in order to send to Azure. - - :ivar additional_properties: Unmatched properties from the message are deserialized to this - collection. - :vartype additional_properties: dict[str, JSON] - :ivar type: The read setting type. Required. - :vartype type: str - :ivar compression_properties: Compression settings. 
- :vartype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings - """ - - _validation = { - "type": {"required": True}, - } - - _attribute_map = { - "additional_properties": {"key": "", "type": "{object}"}, - "type": {"key": "type", "type": "str"}, - "compression_properties": {"key": "compressionProperties", "type": "CompressionReadSettings"}, - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, JSON]] = None, - compression_properties: Optional["_models.CompressionReadSettings"] = None, - **kwargs: Any - ) -> None: - """ - :keyword additional_properties: Unmatched properties from the message are deserialized to this - collection. - :paramtype additional_properties: dict[str, JSON] - :keyword compression_properties: Compression settings. - :paramtype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings - """ - super().__init__(additional_properties=additional_properties, **kwargs) - self.type: str = "JsonReadSettings" - self.compression_properties = compression_properties + self.type: str = "LakeHouseTable" + self.table = table -class JsonSink(CopySink): - """A copy activity Json sink. +class LakeHouseTableSink(CopySink): # pylint: disable=too-many-instance-attributes + """A copy activity for Microsoft Fabric LakeHouse Table sink. All required parameters must be populated in order to send to Azure. @@ -35488,10 +37015,16 @@ class JsonSink(CopySink): :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). :vartype disable_metrics_collection: JSON - :ivar store_settings: Json store settings. - :vartype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :ivar format_settings: Json format settings. - :vartype format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings + :ivar table_action_option: The type of table action for LakeHouse Table sink. 
Possible values + include: "None", "Append", "Overwrite". + :vartype table_action_option: JSON + :ivar partition_option: Create partitions in folder structure based on one or multiple columns. + Each distinct column value (pair) will be a new partition. Possible values include: "None", + "PartitionByKey". + :vartype partition_option: JSON + :ivar partition_name_list: Specify the partition column names from sink columns. Type: array of + objects (or Expression with resultType array of objects). + :vartype partition_name_list: JSON """ _validation = { @@ -35507,8 +37040,9 @@ class JsonSink(CopySink): "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, - "store_settings": {"key": "storeSettings", "type": "StoreWriteSettings"}, - "format_settings": {"key": "formatSettings", "type": "JsonWriteSettings"}, + "table_action_option": {"key": "tableActionOption", "type": "object"}, + "partition_option": {"key": "partitionOption", "type": "object"}, + "partition_name_list": {"key": "partitionNameList", "type": "object"}, } def __init__( @@ -35521,8 +37055,9 @@ def __init__( sink_retry_wait: Optional[JSON] = None, max_concurrent_connections: Optional[JSON] = None, disable_metrics_collection: Optional[JSON] = None, - store_settings: Optional["_models.StoreWriteSettings"] = None, - format_settings: Optional["_models.JsonWriteSettings"] = None, + table_action_option: Optional[JSON] = None, + partition_option: Optional[JSON] = None, + partition_name_list: Optional[JSON] = None, **kwargs: Any ) -> None: """ @@ -35547,10 +37082,16 @@ def __init__( :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). :paramtype disable_metrics_collection: JSON - :keyword store_settings: Json store settings. 
- :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings - :keyword format_settings: Json format settings. - :paramtype format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings + :keyword table_action_option: The type of table action for LakeHouse Table sink. Possible + values include: "None", "Append", "Overwrite". + :paramtype table_action_option: JSON + :keyword partition_option: Create partitions in folder structure based on one or multiple + columns. Each distinct column value (pair) will be a new partition. Possible values include: + "None", "PartitionByKey". + :paramtype partition_option: JSON + :keyword partition_name_list: Specify the partition column names from sink columns. Type: array + of objects (or Expression with resultType array of objects). + :paramtype partition_name_list: JSON """ super().__init__( additional_properties=additional_properties, @@ -35562,13 +37103,14 @@ def __init__( disable_metrics_collection=disable_metrics_collection, **kwargs ) - self.type: str = "JsonSink" - self.store_settings = store_settings - self.format_settings = format_settings + self.type: str = "LakeHouseTableSink" + self.table_action_option = table_action_option + self.partition_option = partition_option + self.partition_name_list = partition_name_list -class JsonSource(CopySource): - """A copy activity Json source. +class LakeHouseTableSource(CopySource): + """A copy activity source for Microsoft Fabric LakeHouse Table. All required parameters must be populated in order to send to Azure. @@ -35589,10 +37131,12 @@ class JsonSource(CopySource): :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). :vartype disable_metrics_collection: JSON - :ivar store_settings: Json store settings. - :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :ivar format_settings: Json format settings. 
- :vartype format_settings: ~azure.mgmt.datafactory.models.JsonReadSettings + :ivar timestamp_as_of: Query an older snapshot by timestamp. Type: string (or Expression with + resultType string). + :vartype timestamp_as_of: JSON + :ivar version_as_of: Query an older snapshot by version. Type: integer (or Expression with + resultType integer). + :vartype version_as_of: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). :vartype additional_columns: JSON @@ -35609,8 +37153,8 @@ class JsonSource(CopySource): "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, - "store_settings": {"key": "storeSettings", "type": "StoreReadSettings"}, - "format_settings": {"key": "formatSettings", "type": "JsonReadSettings"}, + "timestamp_as_of": {"key": "timestampAsOf", "type": "object"}, + "version_as_of": {"key": "versionAsOf", "type": "object"}, "additional_columns": {"key": "additionalColumns", "type": "object"}, } @@ -35622,8 +37166,8 @@ def __init__( source_retry_wait: Optional[JSON] = None, max_concurrent_connections: Optional[JSON] = None, disable_metrics_collection: Optional[JSON] = None, - store_settings: Optional["_models.StoreReadSettings"] = None, - format_settings: Optional["_models.JsonReadSettings"] = None, + timestamp_as_of: Optional[JSON] = None, + version_as_of: Optional[JSON] = None, additional_columns: Optional[JSON] = None, **kwargs: Any ) -> None: @@ -35643,10 +37187,12 @@ def __init__( :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). :paramtype disable_metrics_collection: JSON - :keyword store_settings: Json store settings. 
- :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings - :keyword format_settings: Json format settings. - :paramtype format_settings: ~azure.mgmt.datafactory.models.JsonReadSettings + :keyword timestamp_as_of: Query an older snapshot by timestamp. Type: string (or Expression + with resultType string). + :paramtype timestamp_as_of: JSON + :keyword version_as_of: Query an older snapshot by version. Type: integer (or Expression with + resultType integer). + :paramtype version_as_of: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). :paramtype additional_columns: JSON @@ -35659,14 +37205,14 @@ def __init__( disable_metrics_collection=disable_metrics_collection, **kwargs ) - self.type: str = "JsonSource" - self.store_settings = store_settings - self.format_settings = format_settings + self.type: str = "LakeHouseTableSource" + self.timestamp_as_of = timestamp_as_of + self.version_as_of = version_as_of self.additional_columns = additional_columns -class JsonWriteSettings(FormatWriteSettings): - """Json write settings. +class LakeHouseWriteSettings(StoreWriteSettings): + """Microsoft Fabric LakeHouse Files write settings. All required parameters must be populated in order to send to Azure. @@ -35675,9 +37221,14 @@ class JsonWriteSettings(FormatWriteSettings): :vartype additional_properties: dict[str, JSON] :ivar type: The write setting type. Required. :vartype type: str - :ivar file_pattern: File pattern of JSON. This setting controls the way a collection of JSON - objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. - :vartype file_pattern: JSON + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). 
+ :vartype max_concurrent_connections: JSON + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :vartype disable_metrics_collection: JSON + :ivar copy_behavior: The type of copy behavior for copy sink. + :vartype copy_behavior: JSON """ _validation = { @@ -35687,27 +37238,41 @@ class JsonWriteSettings(FormatWriteSettings): _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "type": {"key": "type", "type": "str"}, - "file_pattern": {"key": "filePattern", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "copy_behavior": {"key": "copyBehavior", "type": "object"}, } def __init__( self, *, additional_properties: Optional[Dict[str, JSON]] = None, - file_pattern: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + copy_behavior: Optional[JSON] = None, **kwargs: Any ) -> None: """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] - :keyword file_pattern: File pattern of JSON. This setting controls the way a collection of JSON - objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. - :paramtype file_pattern: JSON + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: JSON + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: JSON + :keyword copy_behavior: The type of copy behavior for copy sink. 
+ :paramtype copy_behavior: JSON """ - super().__init__(additional_properties=additional_properties, **kwargs) - self.type: str = "JsonWriteSettings" - self.file_pattern = file_pattern + super().__init__( + additional_properties=additional_properties, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + copy_behavior=copy_behavior, + **kwargs + ) + self.type: str = "LakeHouseWriteSettings" class LinkedIntegrationRuntime(_serialization.Model): @@ -36206,6 +37771,13 @@ class LookupActivity(ExecutionActivity): # pylint: disable=too-many-instance-at :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -36235,6 +37807,8 @@ class LookupActivity(ExecutionActivity): # pylint: disable=too-many-instance-at "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -36252,6 +37826,8 @@ def __init__( dataset: "_models.DatasetReference", additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -36267,6 +37843,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. 
:paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -36287,6 +37870,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -36332,9 +37917,8 @@ class MagentoLinkedService(LinkedService): # pylint: disable=too-many-instance- connecting over SSL. The default value is true. :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -36354,7 +37938,7 @@ class MagentoLinkedService(LinkedService): # pylint: disable=too-many-instance- "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -36370,7 +37954,7 @@ def __init__( use_encrypted_endpoints: Optional[JSON] = None, use_host_verification: Optional[JSON] = None, use_peer_verification: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -36400,9 +37984,8 @@ def __init__( connecting over SSL. The default value is true. 
:paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -37339,6 +38922,474 @@ def __init__(self, *, properties: "_models.ManagedVirtualNetwork", **kwargs: Any self.properties = properties +class MapperAttributeMapping(_serialization.Model): + """Source and target column mapping details. + + :ivar name: Name of the target column. + :vartype name: str + :ivar type: Type of the CDC attribute mapping. Note: 'Advanced' mapping type is also saved as + 'Derived'. Known values are: "Direct", "Derived", and "Aggregate". + :vartype type: str or ~azure.mgmt.datafactory.models.MappingType + :ivar function_name: Name of the function used for 'Aggregate' and 'Derived' (except + 'Advanced') type mapping. + :vartype function_name: str + :ivar expression: Expression used for 'Aggregate' and 'Derived' type mapping. + :vartype expression: str + :ivar attribute_reference: Reference of the source column used in the mapping. It is used for + 'Direct' mapping type only. + :vartype attribute_reference: ~azure.mgmt.datafactory.models.MapperAttributeReference + :ivar attribute_references: List of references for source columns. It is used for 'Derived' and + 'Aggregate' type mappings only. 
+ :vartype attribute_references: list[~azure.mgmt.datafactory.models.MapperAttributeReference] + """ + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "function_name": {"key": "functionName", "type": "str"}, + "expression": {"key": "expression", "type": "str"}, + "attribute_reference": {"key": "attributeReference", "type": "MapperAttributeReference"}, + "attribute_references": {"key": "attributeReferences", "type": "[MapperAttributeReference]"}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + type: Optional[Union[str, "_models.MappingType"]] = None, + function_name: Optional[str] = None, + expression: Optional[str] = None, + attribute_reference: Optional["_models.MapperAttributeReference"] = None, + attribute_references: Optional[List["_models.MapperAttributeReference"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword name: Name of the target column. + :paramtype name: str + :keyword type: Type of the CDC attribute mapping. Note: 'Advanced' mapping type is also saved + as 'Derived'. Known values are: "Direct", "Derived", and "Aggregate". + :paramtype type: str or ~azure.mgmt.datafactory.models.MappingType + :keyword function_name: Name of the function used for 'Aggregate' and 'Derived' (except + 'Advanced') type mapping. + :paramtype function_name: str + :keyword expression: Expression used for 'Aggregate' and 'Derived' type mapping. + :paramtype expression: str + :keyword attribute_reference: Reference of the source column used in the mapping. It is used + for 'Direct' mapping type only. + :paramtype attribute_reference: ~azure.mgmt.datafactory.models.MapperAttributeReference + :keyword attribute_references: List of references for source columns. It is used for 'Derived' + and 'Aggregate' type mappings only. 
+ :paramtype attribute_references: list[~azure.mgmt.datafactory.models.MapperAttributeReference] + """ + super().__init__(**kwargs) + self.name = name + self.type = type + self.function_name = function_name + self.expression = expression + self.attribute_reference = attribute_reference + self.attribute_references = attribute_references + + +class MapperAttributeMappings(_serialization.Model): + """Attribute mapping details. + + :ivar attribute_mappings: List of attribute mappings. + :vartype attribute_mappings: list[~azure.mgmt.datafactory.models.MapperAttributeMapping] + """ + + _attribute_map = { + "attribute_mappings": {"key": "attributeMappings", "type": "[MapperAttributeMapping]"}, + } + + def __init__( + self, *, attribute_mappings: Optional[List["_models.MapperAttributeMapping"]] = None, **kwargs: Any + ) -> None: + """ + :keyword attribute_mappings: List of attribute mappings. + :paramtype attribute_mappings: list[~azure.mgmt.datafactory.models.MapperAttributeMapping] + """ + super().__init__(**kwargs) + self.attribute_mappings = attribute_mappings + + +class MapperAttributeReference(_serialization.Model): + """Attribute reference details for the referred column. + + :ivar name: Name of the column. + :vartype name: str + :ivar entity: Name of the table. + :vartype entity: str + :ivar entity_connection_reference: The connection reference for the connection. + :vartype entity_connection_reference: ~azure.mgmt.datafactory.models.MapperConnectionReference + """ + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "entity": {"key": "entity", "type": "str"}, + "entity_connection_reference": {"key": "entityConnectionReference", "type": "MapperConnectionReference"}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + entity: Optional[str] = None, + entity_connection_reference: Optional["_models.MapperConnectionReference"] = None, + **kwargs: Any + ) -> None: + """ + :keyword name: Name of the column. 
+ :paramtype name: str + :keyword entity: Name of the table. + :paramtype entity: str + :keyword entity_connection_reference: The connection reference for the connection. + :paramtype entity_connection_reference: + ~azure.mgmt.datafactory.models.MapperConnectionReference + """ + super().__init__(**kwargs) + self.name = name + self.entity = entity + self.entity_connection_reference = entity_connection_reference + + +class MapperConnection(_serialization.Model): + """Source connection details. + + All required parameters must be populated in order to send to Azure. + + :ivar linked_service: Linked service reference. + :vartype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar linked_service_type: Type of the linked service e.g.: AzureBlobFS. + :vartype linked_service_type: str + :ivar type: Type of connection via linked service or dataset. Required. "linkedservicetype" + :vartype type: str or ~azure.mgmt.datafactory.models.ConnectionType + :ivar is_inline_dataset: A boolean indicating whether linked service is of type inline dataset. + Currently only inline datasets are supported. + :vartype is_inline_dataset: bool + :ivar common_dsl_connector_properties: List of name/value pairs for connection properties. 
+ :vartype common_dsl_connector_properties: + list[~azure.mgmt.datafactory.models.MapperDslConnectorProperties] + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "linked_service": {"key": "linkedService", "type": "LinkedServiceReference"}, + "linked_service_type": {"key": "linkedServiceType", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "is_inline_dataset": {"key": "isInlineDataset", "type": "bool"}, + "common_dsl_connector_properties": { + "key": "commonDslConnectorProperties", + "type": "[MapperDslConnectorProperties]", + }, + } + + def __init__( + self, + *, + type: Union[str, "_models.ConnectionType"], + linked_service: Optional["_models.LinkedServiceReference"] = None, + linked_service_type: Optional[str] = None, + is_inline_dataset: Optional[bool] = None, + common_dsl_connector_properties: Optional[List["_models.MapperDslConnectorProperties"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword linked_service: Linked service reference. + :paramtype linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword linked_service_type: Type of the linked service e.g.: AzureBlobFS. + :paramtype linked_service_type: str + :keyword type: Type of connection via linked service or dataset. Required. "linkedservicetype" + :paramtype type: str or ~azure.mgmt.datafactory.models.ConnectionType + :keyword is_inline_dataset: A boolean indicating whether linked service is of type inline + dataset. Currently only inline datasets are supported. + :paramtype is_inline_dataset: bool + :keyword common_dsl_connector_properties: List of name/value pairs for connection properties. 
+ :paramtype common_dsl_connector_properties: + list[~azure.mgmt.datafactory.models.MapperDslConnectorProperties] + """ + super().__init__(**kwargs) + self.linked_service = linked_service + self.linked_service_type = linked_service_type + self.type = type + self.is_inline_dataset = is_inline_dataset + self.common_dsl_connector_properties = common_dsl_connector_properties + + +class MapperConnectionReference(_serialization.Model): + """Source or target connection reference details. + + :ivar connection_name: Name of the connection. + :vartype connection_name: str + :ivar type: Type of connection via linked service or dataset. "linkedservicetype" + :vartype type: str or ~azure.mgmt.datafactory.models.ConnectionType + """ + + _attribute_map = { + "connection_name": {"key": "connectionName", "type": "str"}, + "type": {"key": "type", "type": "str"}, + } + + def __init__( + self, + *, + connection_name: Optional[str] = None, + type: Optional[Union[str, "_models.ConnectionType"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword connection_name: Name of the connection. + :paramtype connection_name: str + :keyword type: Type of connection via linked service or dataset. "linkedservicetype" + :paramtype type: str or ~azure.mgmt.datafactory.models.ConnectionType + """ + super().__init__(**kwargs) + self.connection_name = connection_name + self.type = type + + +class MapperDslConnectorProperties(_serialization.Model): + """Connector properties of a CDC table in terms of name / value pairs. + + :ivar name: Name of the property. + :vartype name: str + :ivar value: Value of the property. + :vartype value: JSON + """ + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "value": {"key": "value", "type": "object"}, + } + + def __init__(self, *, name: Optional[str] = None, value: Optional[JSON] = None, **kwargs: Any) -> None: + """ + :keyword name: Name of the property. + :paramtype name: str + :keyword value: Value of the property. 
+ :paramtype value: JSON + """ + super().__init__(**kwargs) + self.name = name + self.value = value + + +class MapperPolicy(_serialization.Model): + """CDC Policy. + + :ivar mode: Mode of running the CDC: batch vs continuous. + :vartype mode: str + :ivar recurrence: Defines the frequency and interval for running the CDC for batch mode. + :vartype recurrence: ~azure.mgmt.datafactory.models.MapperPolicyRecurrence + """ + + _attribute_map = { + "mode": {"key": "mode", "type": "str"}, + "recurrence": {"key": "recurrence", "type": "MapperPolicyRecurrence"}, + } + + def __init__( + self, + *, + mode: Optional[str] = None, + recurrence: Optional["_models.MapperPolicyRecurrence"] = None, + **kwargs: Any + ) -> None: + """ + :keyword mode: Mode of running the CDC: batch vs continuous. + :paramtype mode: str + :keyword recurrence: Defines the frequency and interval for running the CDC for batch mode. + :paramtype recurrence: ~azure.mgmt.datafactory.models.MapperPolicyRecurrence + """ + super().__init__(**kwargs) + self.mode = mode + self.recurrence = recurrence + + +class MapperPolicyRecurrence(_serialization.Model): + """CDC policy recurrence details. + + :ivar frequency: Frequency of period in terms of 'Hour', 'Minute' or 'Second'. Known values + are: "Hour", "Minute", and "Second". + :vartype frequency: str or ~azure.mgmt.datafactory.models.FrequencyType + :ivar interval: Actual interval value as per chosen frequency. + :vartype interval: int + """ + + _attribute_map = { + "frequency": {"key": "frequency", "type": "str"}, + "interval": {"key": "interval", "type": "int"}, + } + + def __init__( + self, + *, + frequency: Optional[Union[str, "_models.FrequencyType"]] = None, + interval: Optional[int] = None, + **kwargs: Any + ) -> None: + """ + :keyword frequency: Frequency of period in terms of 'Hour', 'Minute' or 'Second'. Known values + are: "Hour", "Minute", and "Second". 
+ :paramtype frequency: str or ~azure.mgmt.datafactory.models.FrequencyType + :keyword interval: Actual interval value as per chosen frequency. + :paramtype interval: int + """ + super().__init__(**kwargs) + self.frequency = frequency + self.interval = interval + + +class MapperSourceConnectionsInfo(_serialization.Model): + """A object which contains list of tables and connection details for a source connection. + + :ivar source_entities: List of source tables for a source connection. + :vartype source_entities: list[~azure.mgmt.datafactory.models.MapperTable] + :ivar connection: Source connection details. + :vartype connection: ~azure.mgmt.datafactory.models.MapperConnection + """ + + _attribute_map = { + "source_entities": {"key": "sourceEntities", "type": "[MapperTable]"}, + "connection": {"key": "connection", "type": "MapperConnection"}, + } + + def __init__( + self, + *, + source_entities: Optional[List["_models.MapperTable"]] = None, + connection: Optional["_models.MapperConnection"] = None, + **kwargs: Any + ) -> None: + """ + :keyword source_entities: List of source tables for a source connection. + :paramtype source_entities: list[~azure.mgmt.datafactory.models.MapperTable] + :keyword connection: Source connection details. + :paramtype connection: ~azure.mgmt.datafactory.models.MapperConnection + """ + super().__init__(**kwargs) + self.source_entities = source_entities + self.connection = connection + + +class MapperTable(_serialization.Model): + """CDC table details. + + :ivar name: Name of the table. + :vartype name: str + :ivar schema: List of columns for the source table. + :vartype schema: list[~azure.mgmt.datafactory.models.MapperTableSchema] + :ivar dsl_connector_properties: List of name/value pairs for connection properties. 
+ :vartype dsl_connector_properties: + list[~azure.mgmt.datafactory.models.MapperDslConnectorProperties] + """ + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "schema": {"key": "properties.schema", "type": "[MapperTableSchema]"}, + "dsl_connector_properties": { + "key": "properties.dslConnectorProperties", + "type": "[MapperDslConnectorProperties]", + }, + } + + def __init__( + self, + *, + name: Optional[str] = None, + schema: Optional[List["_models.MapperTableSchema"]] = None, + dsl_connector_properties: Optional[List["_models.MapperDslConnectorProperties"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword name: Name of the table. + :paramtype name: str + :keyword schema: List of columns for the source table. + :paramtype schema: list[~azure.mgmt.datafactory.models.MapperTableSchema] + :keyword dsl_connector_properties: List of name/value pairs for connection properties. + :paramtype dsl_connector_properties: + list[~azure.mgmt.datafactory.models.MapperDslConnectorProperties] + """ + super().__init__(**kwargs) + self.name = name + self.schema = schema + self.dsl_connector_properties = dsl_connector_properties + + +class MapperTableSchema(_serialization.Model): + """Schema of a CDC table in terms of column names and their corresponding data types. + + :ivar name: Name of the column. + :vartype name: str + :ivar data_type: Data type of the column. + :vartype data_type: str + """ + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "data_type": {"key": "dataType", "type": "str"}, + } + + def __init__(self, *, name: Optional[str] = None, data_type: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword name: Name of the column. + :paramtype name: str + :keyword data_type: Data type of the column. 
+ :paramtype data_type: str + """ + super().__init__(**kwargs) + self.name = name + self.data_type = data_type + + +class MapperTargetConnectionsInfo(_serialization.Model): + """A object which contains list of tables and connection details for a target connection. + + :ivar target_entities: List of source tables for a target connection. + :vartype target_entities: list[~azure.mgmt.datafactory.models.MapperTable] + :ivar connection: Source connection details. + :vartype connection: ~azure.mgmt.datafactory.models.MapperConnection + :ivar data_mapper_mappings: List of table mappings. + :vartype data_mapper_mappings: list[~azure.mgmt.datafactory.models.DataMapperMapping] + :ivar relationships: List of relationship info among the tables. + :vartype relationships: list[JSON] + """ + + _attribute_map = { + "target_entities": {"key": "targetEntities", "type": "[MapperTable]"}, + "connection": {"key": "connection", "type": "MapperConnection"}, + "data_mapper_mappings": {"key": "dataMapperMappings", "type": "[DataMapperMapping]"}, + "relationships": {"key": "relationships", "type": "[object]"}, + } + + def __init__( + self, + *, + target_entities: Optional[List["_models.MapperTable"]] = None, + connection: Optional["_models.MapperConnection"] = None, + data_mapper_mappings: Optional[List["_models.DataMapperMapping"]] = None, + relationships: Optional[List[JSON]] = None, + **kwargs: Any + ) -> None: + """ + :keyword target_entities: List of source tables for a target connection. + :paramtype target_entities: list[~azure.mgmt.datafactory.models.MapperTable] + :keyword connection: Source connection details. + :paramtype connection: ~azure.mgmt.datafactory.models.MapperConnection + :keyword data_mapper_mappings: List of table mappings. + :paramtype data_mapper_mappings: list[~azure.mgmt.datafactory.models.DataMapperMapping] + :keyword relationships: List of relationship info among the tables. 
+ :paramtype relationships: list[JSON] + """ + super().__init__(**kwargs) + self.target_entities = target_entities + self.connection = connection + self.data_mapper_mappings = data_mapper_mappings + self.relationships = relationships + + class MappingDataFlow(DataFlow): """Mapping data flow. @@ -37446,9 +39497,8 @@ class MariaDBLinkedService(LinkedService): :ivar pwd: The Azure key vault secret reference of password in connection string. :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -37464,7 +39514,7 @@ class MariaDBLinkedService(LinkedService): "annotations": {"key": "annotations", "type": "[object]"}, "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, "pwd": {"key": "typeProperties.pwd", "type": "AzureKeyVaultSecretReference"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -37477,7 +39527,7 @@ def __init__( annotations: Optional[List[JSON]] = None, connection_string: Optional[JSON] = None, pwd: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -37498,9 +39548,8 @@ def __init__( :keyword pwd: The Azure key vault secret reference of password in connection string. :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. 
Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -37755,9 +39804,8 @@ class MarketoLinkedService(LinkedService): # pylint: disable=too-many-instance- connecting over SSL. The default value is true. :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -37779,7 +39827,7 @@ class MarketoLinkedService(LinkedService): # pylint: disable=too-many-instance- "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -37796,7 +39844,7 @@ def __init__( use_encrypted_endpoints: Optional[JSON] = None, use_host_verification: Optional[JSON] = None, use_peer_verification: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -37829,9 +39877,8 @@ def __init__( connecting over SSL. The default value is true. 
:paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -38100,8 +40147,8 @@ class MicrosoftAccessLinkedService(LinkedService): # pylint: disable=too-many-i :ivar annotations: List of tags that can be used for describing the linked service. :vartype annotations: list[JSON] :ivar connection_string: The non-access credential portion of the connection string as well as - an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. - Required. + an optional encrypted credential. Type: string, or SecureString, or + AzureKeyVaultSecretReference, or Expression with resultType string. Required. :vartype connection_string: JSON :ivar authentication_type: Type of authentication used to connect to the Microsoft Access as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with @@ -38116,9 +40163,8 @@ class MicrosoftAccessLinkedService(LinkedService): # pylint: disable=too-many-i :ivar password: Password for Basic authentication. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -38138,7 +40184,7 @@ class MicrosoftAccessLinkedService(LinkedService): # pylint: disable=too-many-i "credential": {"key": "typeProperties.credential", "type": "SecretBase"}, "user_name": {"key": "typeProperties.userName", "type": "object"}, "password": {"key": "typeProperties.password", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -38154,7 +40200,7 @@ def __init__( credential: Optional["_models.SecretBase"] = None, user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -38170,8 +40216,8 @@ def __init__( :keyword annotations: List of tags that can be used for describing the linked service. :paramtype annotations: list[JSON] :keyword connection_string: The non-access credential portion of the connection string as well - as an optional encrypted credential. Type: string, SecureString or - AzureKeyVaultSecretReference. Required. + as an optional encrypted credential. Type: string, or SecureString, or + AzureKeyVaultSecretReference, or Expression with resultType string. Required. :paramtype connection_string: JSON :keyword authentication_type: Type of authentication used to connect to the Microsoft Access as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with @@ -38186,9 +40232,8 @@ def __init__( :keyword password: Password for Basic authentication. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). 
- :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -38634,6 +40679,9 @@ class MongoDbAtlasLinkedService(LinkedService): :ivar database: The name of the MongoDB Atlas database that you want to access. Type: string (or Expression with resultType string). Required. :vartype database: JSON + :ivar driver_version: The driver version that you want to choose. Allowed value are v1 and v2. + Type: string (or Expression with resultType string). + :vartype driver_version: JSON """ _validation = { @@ -38651,6 +40699,7 @@ class MongoDbAtlasLinkedService(LinkedService): "annotations": {"key": "annotations", "type": "[object]"}, "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, "database": {"key": "typeProperties.database", "type": "object"}, + "driver_version": {"key": "typeProperties.driverVersion", "type": "object"}, } def __init__( @@ -38663,6 +40712,7 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, + driver_version: Optional[JSON] = None, **kwargs: Any ) -> None: """ @@ -38684,6 +40734,9 @@ def __init__( :keyword database: The name of the MongoDB Atlas database that you want to access. Type: string (or Expression with resultType string). Required. :paramtype database: JSON + :keyword driver_version: The driver version that you want to choose. Allowed value are v1 and + v2. Type: string (or Expression with resultType string). 
+ :paramtype driver_version: JSON """ super().__init__( additional_properties=additional_properties, @@ -38696,6 +40749,7 @@ def __init__( self.type: str = "MongoDbAtlas" self.connection_string = connection_string self.database = database + self.driver_version = driver_version class MongoDbAtlasSink(CopySink): @@ -39145,9 +41199,8 @@ class MongoDbLinkedService(LinkedService): # pylint: disable=too-many-instance- the server. The default value is false. Type: boolean (or Expression with resultType boolean). :vartype allow_self_signed_server_cert: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -39172,7 +41225,7 @@ class MongoDbLinkedService(LinkedService): # pylint: disable=too-many-instance- "port": {"key": "typeProperties.port", "type": "object"}, "enable_ssl": {"key": "typeProperties.enableSsl", "type": "object"}, "allow_self_signed_server_cert": {"key": "typeProperties.allowSelfSignedServerCert", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -39192,7 +41245,7 @@ def __init__( port: Optional[JSON] = None, enable_ssl: Optional[JSON] = None, allow_self_signed_server_cert: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -39236,9 +41289,8 @@ def __init__( boolean). :paramtype allow_self_signed_server_cert: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. 
Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -39794,14 +41846,14 @@ class MySqlLinkedService(LinkedService): :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. :vartype annotations: list[JSON] - :ivar connection_string: The connection string. Required. + :ivar connection_string: The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Required. :vartype connection_string: JSON :ivar password: The Azure key vault secret reference of password in connection string. :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -39818,7 +41870,7 @@ class MySqlLinkedService(LinkedService): "annotations": {"key": "annotations", "type": "[object]"}, "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, "password": {"key": "typeProperties.password", "type": "AzureKeyVaultSecretReference"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -39831,7 +41883,7 @@ def __init__( parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, password: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -39846,14 +41898,14 @@ def __init__( :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. :paramtype annotations: list[JSON] - :keyword connection_string: The connection string. Required. + :keyword connection_string: The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Required. :paramtype connection_string: JSON :keyword password: The Azure key vault secret reference of password in connection string. :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. 
+ :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -40095,9 +42147,8 @@ class NetezzaLinkedService(LinkedService): :ivar pwd: The Azure key vault secret reference of password in connection string. :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -40113,7 +42164,7 @@ class NetezzaLinkedService(LinkedService): "annotations": {"key": "annotations", "type": "[object]"}, "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, "pwd": {"key": "typeProperties.pwd", "type": "AzureKeyVaultSecretReference"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -40126,7 +42177,7 @@ def __init__( annotations: Optional[List[JSON]] = None, connection_string: Optional[JSON] = None, pwd: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -40147,9 +42198,8 @@ def __init__( :keyword pwd: The Azure key vault secret reference of password in connection string. :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). 
- :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -40549,9 +42599,8 @@ class ODataLinkedService(LinkedService): # pylint: disable=too-many-instance-at string (or Expression with resultType string). :vartype service_principal_embedded_cert_password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -40585,7 +42634,7 @@ class ODataLinkedService(LinkedService): # pylint: disable=too-many-instance-at "key": "typeProperties.servicePrincipalEmbeddedCertPassword", "type": "SecretBase", }, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -40611,7 +42660,7 @@ def __init__( service_principal_key: Optional["_models.SecretBase"] = None, service_principal_embedded_cert: Optional["_models.SecretBase"] = None, service_principal_embedded_cert_password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -40670,9 +42719,8 @@ def __init__( string (or Expression with resultType string). :paramtype service_principal_embedded_cert_password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. 
Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -40926,8 +42974,8 @@ class OdbcLinkedService(LinkedService): # pylint: disable=too-many-instance-att :ivar annotations: List of tags that can be used for describing the linked service. :vartype annotations: list[JSON] :ivar connection_string: The non-access credential portion of the connection string as well as - an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. - Required. + an optional encrypted credential. Type: string, or SecureString, or + AzureKeyVaultSecretReference, or Expression with resultType string. Required. :vartype connection_string: JSON :ivar authentication_type: Type of authentication used to connect to the ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). @@ -40941,9 +42989,8 @@ class OdbcLinkedService(LinkedService): # pylint: disable=too-many-instance-att :ivar password: Password for Basic authentication. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -40963,7 +43010,7 @@ class OdbcLinkedService(LinkedService): # pylint: disable=too-many-instance-att "credential": {"key": "typeProperties.credential", "type": "SecretBase"}, "user_name": {"key": "typeProperties.userName", "type": "object"}, "password": {"key": "typeProperties.password", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -40979,7 +43026,7 @@ def __init__( credential: Optional["_models.SecretBase"] = None, user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -40995,8 +43042,8 @@ def __init__( :keyword annotations: List of tags that can be used for describing the linked service. :paramtype annotations: list[JSON] :keyword connection_string: The non-access credential portion of the connection string as well - as an optional encrypted credential. Type: string, SecureString or - AzureKeyVaultSecretReference. Required. + as an optional encrypted credential. Type: string, or SecureString, or + AzureKeyVaultSecretReference, or Expression with resultType string. Required. :paramtype connection_string: JSON :keyword authentication_type: Type of authentication used to connect to the ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). @@ -41010,9 +43057,8 @@ def __init__( :keyword password: Password for Basic authentication. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). 
- :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -41479,9 +43525,8 @@ class Office365LinkedService(LinkedService): # pylint: disable=too-many-instanc :ivar service_principal_key: Specify the application's key. Required. :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -41503,7 +43548,7 @@ class Office365LinkedService(LinkedService): # pylint: disable=too-many-instanc "service_principal_tenant_id": {"key": "typeProperties.servicePrincipalTenantId", "type": "object"}, "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -41518,7 +43563,7 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -41545,9 +43590,8 @@ def __init__( :keyword service_principal_key: Specify the application's key. Required. 
:paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -41603,8 +43647,8 @@ class Office365Source(CopySource): # pylint: disable=too-many-instance-attribut with resultType string). :vartype end_time: JSON :ivar output_columns: The columns to be read out from the Office 365 table. Type: array of - objects (or Expression with resultType array of objects). Example: [ { "name": "Id" }, { - "name": "CreatedDateTime" } ]. + objects (or Expression with resultType array of objects). itemType: OutputColumn. Example: [ { + "name": "Id" }, { "name": "CreatedDateTime" } ]. :vartype output_columns: JSON """ @@ -41675,8 +43719,8 @@ def __init__( Expression with resultType string). :paramtype end_time: JSON :keyword output_columns: The columns to be read out from the Office 365 table. Type: array of - objects (or Expression with resultType array of objects). Example: [ { "name": "Id" }, { - "name": "CreatedDateTime" } ]. + objects (or Expression with resultType array of objects). itemType: OutputColumn. Example: [ { + "name": "Id" }, { "name": "CreatedDateTime" } ]. :paramtype output_columns: JSON """ super().__init__( @@ -42074,9 +44118,8 @@ class OracleCloudStorageLinkedService(LinkedService): string). :vartype service_url: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -42093,7 +44136,7 @@ class OracleCloudStorageLinkedService(LinkedService): "access_key_id": {"key": "typeProperties.accessKeyId", "type": "object"}, "secret_access_key": {"key": "typeProperties.secretAccessKey", "type": "SecretBase"}, "service_url": {"key": "typeProperties.serviceUrl", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -42107,7 +44150,7 @@ def __init__( access_key_id: Optional[JSON] = None, secret_access_key: Optional["_models.SecretBase"] = None, service_url: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -42134,9 +44177,8 @@ def __init__( string). :paramtype service_url: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -42257,8 +44299,9 @@ class OracleCloudStorageReadSettings(StoreReadSettings): # pylint: disable=too- configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). :vartype file_list_path: JSON - :ivar enable_partition_discovery: Indicates whether to enable partition discovery. - :vartype enable_partition_discovery: bool + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). 
+ :vartype enable_partition_discovery: JSON :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :vartype partition_root_path: JSON @@ -42287,7 +44330,7 @@ class OracleCloudStorageReadSettings(StoreReadSettings): # pylint: disable=too- "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, "prefix": {"key": "prefix", "type": "object"}, "file_list_path": {"key": "fileListPath", "type": "object"}, - "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "object"}, "partition_root_path": {"key": "partitionRootPath", "type": "object"}, "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, "modified_datetime_start": {"key": "modifiedDatetimeStart", "type": "object"}, @@ -42305,7 +44348,7 @@ def __init__( wildcard_file_name: Optional[JSON] = None, prefix: Optional[JSON] = None, file_list_path: Optional[JSON] = None, - enable_partition_discovery: Optional[bool] = None, + enable_partition_discovery: Optional[JSON] = None, partition_root_path: Optional[JSON] = None, delete_files_after_completion: Optional[JSON] = None, modified_datetime_start: Optional[JSON] = None, @@ -42338,8 +44381,9 @@ def __init__( configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). :paramtype file_list_path: JSON - :keyword enable_partition_discovery: Indicates whether to enable partition discovery. - :paramtype enable_partition_discovery: bool + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). + :paramtype enable_partition_discovery: JSON :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). 
:paramtype partition_root_path: JSON @@ -42396,9 +44440,8 @@ class OracleLinkedService(LinkedService): :ivar password: The Azure key vault secret reference of password in connection string. :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -42415,7 +44458,7 @@ class OracleLinkedService(LinkedService): "annotations": {"key": "annotations", "type": "[object]"}, "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, "password": {"key": "typeProperties.password", "type": "AzureKeyVaultSecretReference"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -42428,7 +44471,7 @@ def __init__( parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, password: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -42449,9 +44492,8 @@ def __init__( :keyword password: The Azure key vault secret reference of password in connection string. :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). 
- :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -42560,9 +44602,8 @@ class OracleServiceCloudLinkedService(LinkedService): # pylint: disable=too-man boolean). :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -42585,7 +44626,7 @@ class OracleServiceCloudLinkedService(LinkedService): # pylint: disable=too-man "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -42602,7 +44643,7 @@ def __init__( use_encrypted_endpoints: Optional[JSON] = None, use_host_verification: Optional[JSON] = None, use_peer_verification: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -42636,9 +44677,8 @@ def __init__( boolean). :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). 
- :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -43620,6 +45660,26 @@ def __init__( self.file_name_prefix = file_name_prefix +class OutputColumn(_serialization.Model): + """The columns to be read out from the Office 365 table. + + :ivar name: Name of the table column. Type: string. + :vartype name: str + """ + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + } + + def __init__(self, *, name: Optional[str] = None, **kwargs: Any) -> None: + """ + :keyword name: Name of the table column. Type: string. + :paramtype name: str + """ + super().__init__(**kwargs) + self.name = name + + class PackageStore(_serialization.Model): """Package store for the SSIS integration runtime. @@ -43850,6 +45910,49 @@ def __init__( self.type: str = "ParquetFormat" +class ParquetReadSettings(FormatReadSettings): + """Parquet read settings. + + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: The read setting type. Required. + :vartype type: str + :ivar compression_properties: Compression settings. 
+ :vartype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "compression_properties": {"key": "compressionProperties", "type": "CompressionReadSettings"}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, JSON]] = None, + compression_properties: Optional["_models.CompressionReadSettings"] = None, + **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword compression_properties: Compression settings. + :paramtype compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings + """ + super().__init__(additional_properties=additional_properties, **kwargs) + self.type: str = "ParquetReadSettings" + self.compression_properties = compression_properties + + class ParquetSink(CopySink): """A copy activity Parquet sink. @@ -43981,6 +46084,8 @@ class ParquetSource(CopySource): :vartype disable_metrics_collection: JSON :ivar store_settings: Parquet store settings. :vartype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :ivar format_settings: Parquet format settings. + :vartype format_settings: ~azure.mgmt.datafactory.models.ParquetReadSettings :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
:vartype additional_columns: JSON @@ -43998,6 +46103,7 @@ class ParquetSource(CopySource): "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, "store_settings": {"key": "storeSettings", "type": "StoreReadSettings"}, + "format_settings": {"key": "formatSettings", "type": "ParquetReadSettings"}, "additional_columns": {"key": "additionalColumns", "type": "object"}, } @@ -44010,6 +46116,7 @@ def __init__( max_concurrent_connections: Optional[JSON] = None, disable_metrics_collection: Optional[JSON] = None, store_settings: Optional["_models.StoreReadSettings"] = None, + format_settings: Optional["_models.ParquetReadSettings"] = None, additional_columns: Optional[JSON] = None, **kwargs: Any ) -> None: @@ -44031,6 +46138,8 @@ def __init__( :paramtype disable_metrics_collection: JSON :keyword store_settings: Parquet store settings. :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :keyword format_settings: Parquet format settings. + :paramtype format_settings: ~azure.mgmt.datafactory.models.ParquetReadSettings :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). :paramtype additional_columns: JSON @@ -44045,6 +46154,7 @@ def __init__( ) self.type: str = "ParquetSource" self.store_settings = store_settings + self.format_settings = format_settings self.additional_columns = additional_columns @@ -44122,7 +46232,7 @@ class PaypalLinkedService(LinkedService): # pylint: disable=too-many-instance-a :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. :vartype annotations: list[JSON] - :ivar host: The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). Required. 
+ :ivar host: The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). Required. :vartype host: JSON :ivar client_id: The client ID associated with your PayPal application. Required. :vartype client_id: JSON @@ -44139,9 +46249,8 @@ class PaypalLinkedService(LinkedService): # pylint: disable=too-many-instance-a connecting over SSL. The default value is true. :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -44163,7 +46272,7 @@ class PaypalLinkedService(LinkedService): # pylint: disable=too-many-instance-a "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -44180,7 +46289,7 @@ def __init__( use_encrypted_endpoints: Optional[JSON] = None, use_host_verification: Optional[JSON] = None, use_peer_verification: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -44195,7 +46304,7 @@ def __init__( :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. :paramtype annotations: list[JSON] - :keyword host: The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). Required. 
+ :keyword host: The URL of the PayPal instance. (i.e. api.sandbox.paypal.com). Required. :paramtype host: JSON :keyword client_id: The client ID associated with your PayPal application. Required. :paramtype client_id: JSON @@ -44212,9 +46321,8 @@ def __init__( connecting over SSL. The default value is true. :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -44491,9 +46599,8 @@ class PhoenixLinkedService(LinkedService): # pylint: disable=too-many-instance- the server. The default value is false. :vartype allow_self_signed_server_cert: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -44520,7 +46627,7 @@ class PhoenixLinkedService(LinkedService): # pylint: disable=too-many-instance- "use_system_trust_store": {"key": "typeProperties.useSystemTrustStore", "type": "object"}, "allow_host_name_cn_mismatch": {"key": "typeProperties.allowHostNameCNMismatch", "type": "object"}, "allow_self_signed_server_cert": {"key": "typeProperties.allowSelfSignedServerCert", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -44542,7 +46649,7 @@ def __init__( use_system_trust_store: Optional[JSON] = None, allow_host_name_cn_mismatch: Optional[JSON] = None, allow_self_signed_server_cert: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -44592,9 +46699,8 @@ def __init__( from the server. The default value is false. :paramtype allow_self_signed_server_cert: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -44872,15 +46978,25 @@ class PipelineExternalComputeScaleProperties(_serialization.Model): :ivar time_to_live: Time to live (in minutes) setting of integration runtime which will execute pipeline and external activity. :vartype time_to_live: int + :ivar number_of_pipeline_nodes: Number of the pipeline nodes, which should be greater than 0 + and less than 11. 
+ :vartype number_of_pipeline_nodes: int + :ivar number_of_external_nodes: Number of the external nodes, which should be greater than + 0 and less than 11. + :vartype number_of_external_nodes: int """ _validation = { "time_to_live": {"minimum": 5}, + "number_of_pipeline_nodes": {"maximum": 10, "minimum": 1}, + "number_of_external_nodes": {"maximum": 10, "minimum": 1}, } _attribute_map = { "additional_properties": {"key": "", "type": "{object}"}, "time_to_live": {"key": "timeToLive", "type": "int"}, + "number_of_pipeline_nodes": {"key": "numberOfPipelineNodes", "type": "int"}, + "number_of_external_nodes": {"key": "numberOfExternalNodes", "type": "int"}, } def __init__( @@ -44888,6 +47004,8 @@ def __init__( *, additional_properties: Optional[Dict[str, JSON]] = None, time_to_live: Optional[int] = None, + number_of_pipeline_nodes: Optional[int] = None, + number_of_external_nodes: Optional[int] = None, **kwargs: Any ) -> None: """ @@ -44897,10 +47015,18 @@ def __init__( :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, JSON] :keyword time_to_live: Time to live (in minutes) setting of integration runtime which will execute pipeline and external activity. :paramtype time_to_live: int + :keyword number_of_pipeline_nodes: Number of the pipeline nodes, which should be greater than 0 + and less than 11. + :paramtype number_of_pipeline_nodes: int + :keyword number_of_external_nodes: Number of the external nodes, which should be greater + than 0 and less than 11. + :paramtype number_of_external_nodes: int """ super().__init__(**kwargs) self.additional_properties = additional_properties self.time_to_live = time_to_live + self.number_of_pipeline_nodes = number_of_pipeline_nodes + self.number_of_external_nodes = number_of_external_nodes class PipelineFolder(_serialization.Model): @@ -45395,14 +47521,14 @@ class PostgreSqlLinkedService(LinkedService): :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service.
:vartype annotations: list[JSON] - :ivar connection_string: The connection string. Required. + :ivar connection_string: The connection string. Type: string, SecureString or + AzureKeyVaultSecretReference. Required. :vartype connection_string: JSON :ivar password: The Azure key vault secret reference of password in connection string. :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -45419,7 +47545,7 @@ class PostgreSqlLinkedService(LinkedService): "annotations": {"key": "annotations", "type": "[object]"}, "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, "password": {"key": "typeProperties.password", "type": "AzureKeyVaultSecretReference"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -45432,7 +47558,7 @@ def __init__( parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, password: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -45447,14 +47573,14 @@ def __init__( :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. :paramtype annotations: list[JSON] - :keyword connection_string: The connection string. Required. + :keyword connection_string: The connection string. 
Type: string, SecureString or + AzureKeyVaultSecretReference. Required. :paramtype connection_string: JSON :keyword password: The Azure key vault secret reference of password in connection string. :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -45933,9 +48059,8 @@ class PrestoLinkedService(LinkedService): # pylint: disable=too-many-instance-a are specified in the IANA Time Zone Database. The default value is the system time zone. :vartype time_zone_id: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -45966,7 +48091,7 @@ class PrestoLinkedService(LinkedService): # pylint: disable=too-many-instance-a "allow_host_name_cn_mismatch": {"key": "typeProperties.allowHostNameCNMismatch", "type": "object"}, "allow_self_signed_server_cert": {"key": "typeProperties.allowSelfSignedServerCert", "type": "object"}, "time_zone_id": {"key": "typeProperties.timeZoneID", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -45990,7 +48115,7 @@ def __init__( allow_host_name_cn_mismatch: Optional[JSON] = None, allow_self_signed_server_cert: Optional[JSON] = None, time_zone_id: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -46042,9 +48167,8 @@ def __init__( are specified in the IANA Time Zone Database. The default value is the system time zone. :paramtype time_zone_id: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -46690,9 +48814,8 @@ class QuickbaseLinkedService(LinkedService): :ivar user_token: The user token for the Quickbase source. Required. :vartype user_token: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -46710,7 +48833,7 @@ class QuickbaseLinkedService(LinkedService): "annotations": {"key": "annotations", "type": "[object]"}, "url": {"key": "typeProperties.url", "type": "object"}, "user_token": {"key": "typeProperties.userToken", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -46723,7 +48846,7 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -46744,9 +48867,8 @@ def __init__( :keyword user_token: The user token for the Quickbase source. Required. :paramtype user_token: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -46799,9 +48921,8 @@ class QuickBooksLinkedService(LinkedService): # pylint: disable=too-many-instan HTTPS. The default value is true. :vartype use_encrypted_endpoints: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -46823,7 +48944,7 @@ class QuickBooksLinkedService(LinkedService): # pylint: disable=too-many-instan "access_token": {"key": "typeProperties.accessToken", "type": "SecretBase"}, "access_token_secret": {"key": "typeProperties.accessTokenSecret", "type": "SecretBase"}, "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -46842,7 +48963,7 @@ def __init__( access_token: Optional["_models.SecretBase"] = None, access_token_secret: Optional["_models.SecretBase"] = None, use_encrypted_endpoints: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -46876,9 +48997,8 @@ def __init__( using HTTPS. The default value is true. :paramtype use_encrypted_endpoints: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -47684,9 +49804,8 @@ class ResponsysLinkedService(LinkedService): # pylint: disable=too-many-instanc boolean). :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -47708,7 +49827,7 @@ class ResponsysLinkedService(LinkedService): # pylint: disable=too-many-instanc "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -47725,7 +49844,7 @@ def __init__( use_encrypted_endpoints: Optional[JSON] = None, use_host_verification: Optional[JSON] = None, use_peer_verification: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -47760,9 +49879,8 @@ def __init__( boolean). :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -48022,12 +50140,10 @@ class RestResourceDataset(Dataset): # pylint: disable=too-many-instance-attribu :ivar request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). :vartype request_body: JSON - :ivar additional_headers: The additional HTTP headers in the request to the RESTful API. Type: - string (or Expression with resultType string). 
- :vartype additional_headers: JSON - :ivar pagination_rules: The pagination rules to compose next page requests. Type: string (or - Expression with resultType string). - :vartype pagination_rules: JSON + :ivar additional_headers: The additional HTTP headers in the request to the RESTful API. + :vartype additional_headers: dict[str, JSON] + :ivar pagination_rules: The pagination rules to compose next page requests. + :vartype pagination_rules: dict[str, JSON] """ _validation = { @@ -48048,8 +50164,8 @@ class RestResourceDataset(Dataset): # pylint: disable=too-many-instance-attribu "relative_url": {"key": "typeProperties.relativeUrl", "type": "object"}, "request_method": {"key": "typeProperties.requestMethod", "type": "object"}, "request_body": {"key": "typeProperties.requestBody", "type": "object"}, - "additional_headers": {"key": "typeProperties.additionalHeaders", "type": "object"}, - "pagination_rules": {"key": "typeProperties.paginationRules", "type": "object"}, + "additional_headers": {"key": "typeProperties.additionalHeaders", "type": "{object}"}, + "pagination_rules": {"key": "typeProperties.paginationRules", "type": "{object}"}, } def __init__( @@ -48066,8 +50182,8 @@ def __init__( relative_url: Optional[JSON] = None, request_method: Optional[JSON] = None, request_body: Optional[JSON] = None, - additional_headers: Optional[JSON] = None, - pagination_rules: Optional[JSON] = None, + additional_headers: Optional[Dict[str, JSON]] = None, + pagination_rules: Optional[Dict[str, JSON]] = None, **kwargs: Any ) -> None: """ @@ -48101,11 +50217,9 @@ def __init__( string (or Expression with resultType string). :paramtype request_body: JSON :keyword additional_headers: The additional HTTP headers in the request to the RESTful API. - Type: string (or Expression with resultType string). - :paramtype additional_headers: JSON - :keyword pagination_rules: The pagination rules to compose next page requests. Type: string (or - Expression with resultType string). 
- :paramtype pagination_rules: JSON + :paramtype additional_headers: dict[str, JSON] + :keyword pagination_rules: The pagination rules to compose next page requests. + :paramtype pagination_rules: dict[str, JSON] """ super().__init__( additional_properties=additional_properties, @@ -48144,7 +50258,8 @@ class RestServiceLinkedService(LinkedService): # pylint: disable=too-many-insta :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. :vartype annotations: list[JSON] - :ivar url: The base URL of the REST service. Required. + :ivar url: The base URL of the REST service. Type: string (or Expression with resultType + string). Required. :vartype url: JSON :ivar enable_server_certificate_validation: Whether to validate server side SSL certificate when connecting to the endpoint.The default value is true. Type: boolean (or Expression with @@ -48155,7 +50270,8 @@ class RestServiceLinkedService(LinkedService): # pylint: disable=too-many-insta "ManagedServiceIdentity", and "OAuth2ClientCredential". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.RestServiceAuthenticationType - :ivar user_name: The user name used in Basic authentication type. + :ivar user_name: The user name used in Basic authentication type. Type: string (or Expression + with resultType string). :vartype user_name: JSON :ivar password: The password used in Basic authentication type. :vartype password: ~azure.mgmt.datafactory.models.SecretBase @@ -48163,24 +50279,25 @@ class RestServiceLinkedService(LinkedService): # pylint: disable=too-many-insta authorization. Type: object (or Expression with resultType object). :vartype auth_headers: JSON :ivar service_principal_id: The application's client ID used in AadServicePrincipal - authentication type. + authentication type. Type: string (or Expression with resultType string). 
:vartype service_principal_id: JSON :ivar service_principal_key: The application's key used in AadServicePrincipal authentication type. :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :ivar tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal - authentication type under which your application resides. + authentication type under which your application resides. Type: string (or Expression with + resultType string). :vartype tenant: JSON :ivar azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). :vartype azure_cloud_type: JSON - :ivar aad_resource_id: The resource you are requesting authorization to use. + :ivar aad_resource_id: The resource you are requesting authorization to use. Type: string (or + Expression with resultType string). :vartype aad_resource_id: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str :ivar credential: The credential reference containing authentication information. :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference :ivar client_id: The client ID associated with your application. 
Type: string (or Expression @@ -48226,7 +50343,7 @@ class RestServiceLinkedService(LinkedService): # pylint: disable=too-many-insta "tenant": {"key": "typeProperties.tenant", "type": "object"}, "azure_cloud_type": {"key": "typeProperties.azureCloudType", "type": "object"}, "aad_resource_id": {"key": "typeProperties.aadResourceId", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, "credential": {"key": "typeProperties.credential", "type": "CredentialReference"}, "client_id": {"key": "typeProperties.clientId", "type": "object"}, "client_secret": {"key": "typeProperties.clientSecret", "type": "SecretBase"}, @@ -48254,7 +50371,7 @@ def __init__( # pylint: disable=too-many-locals tenant: Optional[JSON] = None, azure_cloud_type: Optional[JSON] = None, aad_resource_id: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, credential: Optional["_models.CredentialReference"] = None, client_id: Optional[JSON] = None, client_secret: Optional["_models.SecretBase"] = None, @@ -48275,7 +50392,8 @@ def __init__( # pylint: disable=too-many-locals :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. :paramtype annotations: list[JSON] - :keyword url: The base URL of the REST service. Required. + :keyword url: The base URL of the REST service. Type: string (or Expression with resultType + string). Required. :paramtype url: JSON :keyword enable_server_certificate_validation: Whether to validate server side SSL certificate when connecting to the endpoint.The default value is true. Type: boolean (or Expression with @@ -48286,7 +50404,8 @@ def __init__( # pylint: disable=too-many-locals "ManagedServiceIdentity", and "OAuth2ClientCredential". 
:paramtype authentication_type: str or ~azure.mgmt.datafactory.models.RestServiceAuthenticationType - :keyword user_name: The user name used in Basic authentication type. + :keyword user_name: The user name used in Basic authentication type. Type: string (or + Expression with resultType string). :paramtype user_name: JSON :keyword password: The password used in Basic authentication type. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase @@ -48294,24 +50413,25 @@ def __init__( # pylint: disable=too-many-locals authorization. Type: object (or Expression with resultType object). :paramtype auth_headers: JSON :keyword service_principal_id: The application's client ID used in AadServicePrincipal - authentication type. + authentication type. Type: string (or Expression with resultType string). :paramtype service_principal_id: JSON :keyword service_principal_key: The application's key used in AadServicePrincipal authentication type. :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :keyword tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal - authentication type under which your application resides. + authentication type under which your application resides. Type: string (or Expression with + resultType string). :paramtype tenant: JSON :keyword azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). :paramtype azure_cloud_type: JSON - :keyword aad_resource_id: The resource you are requesting authorization to use. + :keyword aad_resource_id: The resource you are requesting authorization to use. Type: string + (or Expression with resultType string). :paramtype aad_resource_id: JSON :keyword encrypted_credential: The encrypted credential used for authentication. 
Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference :keyword client_id: The client ID associated with your application. Type: string (or Expression @@ -48751,7 +50871,7 @@ class RunQueryFilter(_serialization.Model): "ActivityType", "TriggerName", "TriggerRunTimestamp", "RunGroupId", and "LatestOnly". :vartype operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand :ivar operator: Operator to be used for filter. Required. Known values are: "Equals", - "NotEquals", "In", and "NotIn". + "NotEquals", "In", and "NotIn". :vartype operator: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperator :ivar values: List of filter values. Required. :vartype values: list[str] @@ -48786,7 +50906,7 @@ def __init__( "ActivityType", "TriggerName", "TriggerRunTimestamp", "RunGroupId", and "LatestOnly". :paramtype operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand :keyword operator: Operator to be used for filter. Required. Known values are: "Equals", - "NotEquals", "In", and "NotIn". + "NotEquals", "In", and "NotIn". :paramtype operator: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperator :keyword values: List of filter values. Required. :paramtype values: list[str] @@ -48880,9 +51000,8 @@ class SalesforceLinkedService(LinkedService): # pylint: disable=too-many-instan resultType string). :vartype api_version: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string).
- :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -48901,7 +51020,7 @@ class SalesforceLinkedService(LinkedService): # pylint: disable=too-many-instan "password": {"key": "typeProperties.password", "type": "SecretBase"}, "security_token": {"key": "typeProperties.securityToken", "type": "SecretBase"}, "api_version": {"key": "typeProperties.apiVersion", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -48917,7 +51036,7 @@ def __init__( password: Optional["_models.SecretBase"] = None, security_token: Optional["_models.SecretBase"] = None, api_version: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -48948,9 +51067,8 @@ def __init__( resultType string). :paramtype api_version: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -49008,9 +51126,8 @@ class SalesforceMarketingCloudLinkedService(LinkedService): # pylint: disable=t boolean). :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -49030,7 +51147,7 @@ class SalesforceMarketingCloudLinkedService(LinkedService): # pylint: disable=t "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -49047,7 +51164,7 @@ def __init__( use_encrypted_endpoints: Optional[JSON] = None, use_host_verification: Optional[JSON] = None, use_peer_verification: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -49083,9 +51200,8 @@ def __init__( boolean). :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -49450,9 +51566,8 @@ class SalesforceServiceCloudLinkedService(LinkedService): # pylint: disable=too (or Expression with resultType string). :vartype extended_properties: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -49472,7 +51587,7 @@ class SalesforceServiceCloudLinkedService(LinkedService): # pylint: disable=too "security_token": {"key": "typeProperties.securityToken", "type": "SecretBase"}, "api_version": {"key": "typeProperties.apiVersion", "type": "object"}, "extended_properties": {"key": "typeProperties.extendedProperties", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -49489,7 +51604,7 @@ def __init__( security_token: Optional["_models.SecretBase"] = None, api_version: Optional[JSON] = None, extended_properties: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -49523,9 +51638,8 @@ def __init__( string (or Expression with resultType string). :paramtype extended_properties: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -49802,9 +51916,9 @@ class SalesforceServiceCloudSource(CopySource): :vartype disable_metrics_collection: JSON :ivar query: Database query. Type: string (or Expression with resultType string). :vartype query: JSON - :ivar read_behavior: The read behavior for the operation. Default is Query. Known values are: - "Query" and "QueryAll". - :vartype read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + :ivar read_behavior: The read behavior for the operation. Default is Query. Allowed values: + Query/QueryAll. 
Type: string (or Expression with resultType string). + :vartype read_behavior: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). :vartype additional_columns: JSON @@ -49822,7 +51936,7 @@ class SalesforceServiceCloudSource(CopySource): "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, "query": {"key": "query", "type": "object"}, - "read_behavior": {"key": "readBehavior", "type": "str"}, + "read_behavior": {"key": "readBehavior", "type": "object"}, "additional_columns": {"key": "additionalColumns", "type": "object"}, } @@ -49835,7 +51949,7 @@ def __init__( max_concurrent_connections: Optional[JSON] = None, disable_metrics_collection: Optional[JSON] = None, query: Optional[JSON] = None, - read_behavior: Optional[Union[str, "_models.SalesforceSourceReadBehavior"]] = None, + read_behavior: Optional[JSON] = None, additional_columns: Optional[JSON] = None, **kwargs: Any ) -> None: @@ -49857,9 +51971,9 @@ def __init__( :paramtype disable_metrics_collection: JSON :keyword query: Database query. Type: string (or Expression with resultType string). :paramtype query: JSON - :keyword read_behavior: The read behavior for the operation. Default is Query. Known values - are: "Query" and "QueryAll". - :paramtype read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + :keyword read_behavior: The read behavior for the operation. Default is Query. Allowed values: + Query/QueryAll. Type: string (or Expression with resultType string). + :paramtype read_behavior: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
:paramtype additional_columns: JSON @@ -50037,9 +52151,9 @@ class SalesforceSource(TabularSource): :vartype additional_columns: JSON :ivar query: Database query. Type: string (or Expression with resultType string). :vartype query: JSON - :ivar read_behavior: The read behavior for the operation. Default is Query. Known values are: - "Query" and "QueryAll". - :vartype read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + :ivar read_behavior: The read behavior for the operation. Default is Query. Allowed values: + Query/QueryAll. Type: string (or Expression with resultType string). + :vartype read_behavior: JSON """ _validation = { @@ -50056,7 +52170,7 @@ class SalesforceSource(TabularSource): "query_timeout": {"key": "queryTimeout", "type": "object"}, "additional_columns": {"key": "additionalColumns", "type": "object"}, "query": {"key": "query", "type": "object"}, - "read_behavior": {"key": "readBehavior", "type": "str"}, + "read_behavior": {"key": "readBehavior", "type": "object"}, } def __init__( @@ -50070,7 +52184,7 @@ def __init__( query_timeout: Optional[JSON] = None, additional_columns: Optional[JSON] = None, query: Optional[JSON] = None, - read_behavior: Optional[Union[str, "_models.SalesforceSourceReadBehavior"]] = None, + read_behavior: Optional[JSON] = None, **kwargs: Any ) -> None: """ @@ -50097,9 +52211,9 @@ def __init__( :paramtype additional_columns: JSON :keyword query: Database query. Type: string (or Expression with resultType string). :paramtype query: JSON - :keyword read_behavior: The read behavior for the operation. Default is Query. Known values - are: "Query" and "QueryAll". - :paramtype read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + :keyword read_behavior: The read behavior for the operation. Default is Query. Allowed values: + Query/QueryAll. Type: string (or Expression with resultType string). 
+ :paramtype read_behavior: JSON """ super().__init__( additional_properties=additional_properties, @@ -50244,9 +52358,8 @@ class SapBWLinkedService(LinkedService): # pylint: disable=too-many-instance-at :ivar password: Password to access the SAP BW server. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -50268,7 +52381,7 @@ class SapBWLinkedService(LinkedService): # pylint: disable=too-many-instance-at "client_id": {"key": "typeProperties.clientId", "type": "object"}, "user_name": {"key": "typeProperties.userName", "type": "object"}, "password": {"key": "typeProperties.password", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -50284,7 +52397,7 @@ def __init__( annotations: Optional[List[JSON]] = None, user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -50314,9 +52427,8 @@ def __init__( :keyword password: Password to access the SAP BW server. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. 
+ :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -50464,8 +52576,8 @@ class SapCloudForCustomerLinkedService(LinkedService): :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or - username/password must be provided. Type: string (or Expression with resultType string). - :vartype encrypted_credential: JSON + username/password must be provided. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -50483,7 +52595,7 @@ class SapCloudForCustomerLinkedService(LinkedService): "url": {"key": "typeProperties.url", "type": "object"}, "username": {"key": "typeProperties.username", "type": "object"}, "password": {"key": "typeProperties.password", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -50497,7 +52609,7 @@ def __init__( annotations: Optional[List[JSON]] = None, username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -50523,8 +52635,8 @@ def __init__( :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or - username/password must be provided. Type: string (or Expression with resultType string). - :paramtype encrypted_credential: JSON + username/password must be provided. Type: string. 
+ :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -50899,15 +53011,15 @@ class SapEccLinkedService(LinkedService): :ivar url: The URL of SAP ECC OData API. For example, '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with resultType string). Required. - :vartype url: str + :vartype url: JSON :ivar username: The username for Basic authentication. Type: string (or Expression with resultType string). - :vartype username: str + :vartype username: JSON :ivar password: The password for Basic authentication. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or - username/password must be provided. Type: string (or Expression with resultType string). + username/password must be provided. Type: string. :vartype encrypted_credential: str """ @@ -50923,8 +53035,8 @@ class SapEccLinkedService(LinkedService): "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, "annotations": {"key": "annotations", "type": "[object]"}, - "url": {"key": "typeProperties.url", "type": "str"}, - "username": {"key": "typeProperties.username", "type": "str"}, + "url": {"key": "typeProperties.url", "type": "object"}, + "username": {"key": "typeProperties.username", "type": "object"}, "password": {"key": "typeProperties.password", "type": "SecretBase"}, "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } @@ -50932,13 +53044,13 @@ class SapEccLinkedService(LinkedService): def __init__( self, *, - url: str, + url: JSON, additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, 
"_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, - username: Optional[str] = None, + username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, encrypted_credential: Optional[str] = None, **kwargs: Any @@ -50958,15 +53070,15 @@ def __init__( :keyword url: The URL of SAP ECC OData API. For example, '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with resultType string). Required. - :paramtype url: str + :paramtype url: JSON :keyword username: The username for Basic authentication. Type: string (or Expression with resultType string). - :paramtype username: str + :paramtype username: JSON :keyword password: The password for Basic authentication. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or - username/password must be provided. Type: string (or Expression with resultType string). + username/password must be provided. Type: string. :paramtype encrypted_credential: str """ super().__init__( @@ -51237,9 +53349,8 @@ class SapHanaLinkedService(LinkedService): # pylint: disable=too-many-instance- :ivar password: Password to access the SAP HANA server. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -51258,7 +53369,7 @@ class SapHanaLinkedService(LinkedService): # pylint: disable=too-many-instance- "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, "user_name": {"key": "typeProperties.userName", "type": "object"}, "password": {"key": "typeProperties.password", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -51274,7 +53385,7 @@ def __init__( authentication_type: Optional[Union[str, "_models.SapHanaAuthenticationType"]] = None, user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -51304,9 +53415,8 @@ def __init__( :keyword password: Password to access the SAP HANA server. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -51656,9 +53766,8 @@ class SapOdpLinkedService(LinkedService): # pylint: disable=too-many-instance-a string). :vartype subscriber_name: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -51689,7 +53798,7 @@ class SapOdpLinkedService(LinkedService): # pylint: disable=too-many-instance-a "x509_certificate_path": {"key": "typeProperties.x509CertificatePath", "type": "object"}, "logon_group": {"key": "typeProperties.logonGroup", "type": "object"}, "subscriber_name": {"key": "typeProperties.subscriberName", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( # pylint: disable=too-many-locals @@ -51717,7 +53826,7 @@ def __init__( # pylint: disable=too-many-locals x509_certificate_path: Optional[JSON] = None, logon_group: Optional[JSON] = None, subscriber_name: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -51785,9 +53894,8 @@ def __init__( # pylint: disable=too-many-locals string). :paramtype subscriber_name: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -52112,9 +54220,8 @@ class SapOpenHubLinkedService(LinkedService): # pylint: disable=too-many-instan resultType string). :vartype logon_group: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -52138,7 +54245,7 @@ class SapOpenHubLinkedService(LinkedService): # pylint: disable=too-many-instan "message_server": {"key": "typeProperties.messageServer", "type": "object"}, "message_server_service": {"key": "typeProperties.messageServerService", "type": "object"}, "logon_group": {"key": "typeProperties.logonGroup", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -52159,7 +54266,7 @@ def __init__( message_server: Optional[JSON] = None, message_server_service: Optional[JSON] = None, logon_group: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -52207,9 +54314,8 @@ def __init__( resultType string). :paramtype logon_group: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -52556,9 +54662,8 @@ class SapTableLinkedService(LinkedService): # pylint: disable=too-many-instance resultType string). :vartype logon_group: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -52587,7 +54692,7 @@ class SapTableLinkedService(LinkedService): # pylint: disable=too-many-instance "snc_library_path": {"key": "typeProperties.sncLibraryPath", "type": "object"}, "snc_qop": {"key": "typeProperties.sncQop", "type": "object"}, "logon_group": {"key": "typeProperties.logonGroup", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -52613,7 +54718,7 @@ def __init__( snc_library_path: Optional[JSON] = None, snc_qop: Optional[JSON] = None, logon_group: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -52675,9 +54780,8 @@ def __init__( resultType string). :paramtype logon_group: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -53253,6 +55357,13 @@ class ScriptActivity(ExecutionActivity): # pylint: disable=too-many-instance-at :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. 
+ This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. @@ -53281,6 +55392,8 @@ class ScriptActivity(ExecutionActivity): # pylint: disable=too-many-instance-at "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -53296,6 +55409,8 @@ def __init__( name: str, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -53313,6 +55428,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. 
+ This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -53334,6 +55456,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -53355,7 +55479,7 @@ class ScriptActivityParameter(_serialization.Model): "DateTimeOffset", "Decimal", "Double", "Guid", "Int16", "Int32", "Int64", "Single", "String", and "Timespan". :vartype type: str or ~azure.mgmt.datafactory.models.ScriptActivityParameterType - :ivar value: The value of the parameter. + :ivar value: The value of the parameter. Type: string (or Expression with resultType string). :vartype value: JSON :ivar direction: The direction of the parameter. Known values are: "Input", "Output", and "InputOutput". @@ -53389,7 +55513,8 @@ def __init__( "DateTimeOffset", "Decimal", "Double", "Guid", "Int16", "Int32", "Int64", "Single", "String", and "Timespan". :paramtype type: str or ~azure.mgmt.datafactory.models.ScriptActivityParameterType - :keyword value: The value of the parameter. + :keyword value: The value of the parameter. Type: string (or Expression with resultType + string). :paramtype value: JSON :keyword direction: The direction of the parameter. Known values are: "Input", "Output", and "InputOutput". @@ -53494,6 +55619,38 @@ def __init__( self.log_location_settings = log_location_settings +class SecureInputOutputPolicy(_serialization.Model): + """Execution policy for an activity that supports secure input and output. 
+ + :ivar secure_input: When set to true, Input from activity is considered as secure and will not + be logged to monitoring. + :vartype secure_input: bool + :ivar secure_output: When set to true, Output from activity is considered as secure and will + not be logged to monitoring. + :vartype secure_output: bool + """ + + _attribute_map = { + "secure_input": {"key": "secureInput", "type": "bool"}, + "secure_output": {"key": "secureOutput", "type": "bool"}, + } + + def __init__( + self, *, secure_input: Optional[bool] = None, secure_output: Optional[bool] = None, **kwargs: Any + ) -> None: + """ + :keyword secure_input: When set to true, Input from activity is considered as secure and will + not be logged to monitoring. + :paramtype secure_input: bool + :keyword secure_output: When set to true, Output from activity is considered as secure and will + not be logged to monitoring. + :paramtype secure_output: bool + """ + super().__init__(**kwargs) + self.secure_input = secure_input + self.secure_output = secure_output + + class SecureString(SecretBase): """Azure Data Factory secure string definition. The string value will be masked with asterisks '*' during Get or List API calls. @@ -53588,6 +55745,10 @@ class SelfHostedIntegrationRuntime(IntegrationRuntime): :vartype description: str :ivar linked_info: The base definition of a linked integration runtime. :vartype linked_info: ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType + :ivar self_contained_interactive_authoring_enabled: An alternative option to ensure interactive + authoring function when your self-hosted integration runtime is unable to establish a + connection with Azure Relay. 
+ :vartype self_contained_interactive_authoring_enabled: bool """ _validation = { @@ -53599,6 +55760,10 @@ class SelfHostedIntegrationRuntime(IntegrationRuntime): "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, "linked_info": {"key": "typeProperties.linkedInfo", "type": "LinkedIntegrationRuntimeType"}, + "self_contained_interactive_authoring_enabled": { + "key": "typeProperties.selfContainedInteractiveAuthoringEnabled", + "type": "bool", + }, } def __init__( @@ -53607,6 +55772,7 @@ def __init__( additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, linked_info: Optional["_models.LinkedIntegrationRuntimeType"] = None, + self_contained_interactive_authoring_enabled: Optional[bool] = None, **kwargs: Any ) -> None: """ @@ -53617,10 +55783,15 @@ def __init__( :paramtype description: str :keyword linked_info: The base definition of a linked integration runtime. :paramtype linked_info: ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType + :keyword self_contained_interactive_authoring_enabled: An alternative option to ensure + interactive authoring function when your self-hosted integration runtime is unable to establish + a connection with Azure Relay. + :paramtype self_contained_interactive_authoring_enabled: bool """ super().__init__(additional_properties=additional_properties, description=description, **kwargs) self.type: str = "SelfHosted" self.linked_info = linked_info + self.self_contained_interactive_authoring_enabled = self_contained_interactive_authoring_enabled class SelfHostedIntegrationRuntimeNode(_serialization.Model): # pylint: disable=too-many-instance-attributes @@ -53805,6 +55976,10 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): # pylint: d :ivar auto_update_eta: The estimated time when the self-hosted integration runtime will be updated. 
:vartype auto_update_eta: ~datetime.datetime + :ivar self_contained_interactive_authoring_enabled: An alternative option to ensure interactive + authoring function when your self-hosted integration runtime is unable to establish a + connection with Azure Relay. + :vartype self_contained_interactive_authoring_enabled: bool """ _validation = { @@ -53825,6 +56000,7 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): # pylint: d "pushed_version": {"readonly": True}, "latest_version": {"readonly": True}, "auto_update_eta": {"readonly": True}, + "self_contained_interactive_authoring_enabled": {"readonly": True}, } _attribute_map = { @@ -53848,6 +56024,10 @@ class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): # pylint: d "pushed_version": {"key": "typeProperties.pushedVersion", "type": "str"}, "latest_version": {"key": "typeProperties.latestVersion", "type": "str"}, "auto_update_eta": {"key": "typeProperties.autoUpdateETA", "type": "iso-8601"}, + "self_contained_interactive_authoring_enabled": { + "key": "typeProperties.selfContainedInteractiveAuthoringEnabled", + "type": "bool", + }, } def __init__( @@ -53886,6 +56066,7 @@ def __init__( self.pushed_version = None self.latest_version = None self.auto_update_eta = None + self.self_contained_interactive_authoring_enabled = None class ServiceNowLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes @@ -53934,9 +56115,8 @@ class ServiceNowLinkedService(LinkedService): # pylint: disable=too-many-instan connecting over SSL. The default value is true. :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -53961,7 +56141,7 @@ class ServiceNowLinkedService(LinkedService): # pylint: disable=too-many-instan "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -53981,7 +56161,7 @@ def __init__( use_encrypted_endpoints: Optional[JSON] = None, use_host_verification: Optional[JSON] = None, use_peer_verification: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -54024,9 +56204,8 @@ def __init__( connecting over SSL. The default value is true. :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -54324,7 +56503,7 @@ def __init__( self.tenant = tenant -class SetVariableActivity(ControlActivity): +class SetVariableActivity(ControlActivity): # pylint: disable=too-many-instance-attributes """Set value for a Variable. All required parameters must be populated in order to send to Azure. @@ -54338,14 +56517,25 @@ class SetVariableActivity(ControlActivity): :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. 
This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.SecureInputOutputPolicy :ivar variable_name: Name of the variable whose value needs to be set. :vartype variable_name: str :ivar value: Value to be set. Could be a static value or Expression. :vartype value: JSON + :ivar set_system_variable: If set to true, it sets the pipeline run return value. 
+ :vartype set_system_variable: bool """ _validation = { @@ -54358,10 +56548,14 @@ class SetVariableActivity(ControlActivity): "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "policy": {"key": "policy", "type": "SecureInputOutputPolicy"}, "variable_name": {"key": "typeProperties.variableName", "type": "str"}, "value": {"key": "typeProperties.value", "type": "object"}, + "set_system_variable": {"key": "typeProperties.setSystemVariable", "type": "bool"}, } def __init__( @@ -54370,10 +56564,14 @@ def __init__( name: str, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, + policy: Optional["_models.SecureInputOutputPolicy"] = None, variable_name: Optional[str] = None, value: Optional[JSON] = None, + set_system_variable: Optional[bool] = None, **kwargs: Any ) -> None: """ @@ -54384,26 +56582,41 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. 
+ This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.SecureInputOutputPolicy :keyword variable_name: Name of the variable whose value needs to be set. :paramtype variable_name: str :keyword value: Value to be set. Could be a static value or Expression. :paramtype value: JSON + :keyword set_system_variable: If set to true, it sets the pipeline run return value. + :paramtype set_system_variable: bool """ super().__init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, **kwargs ) self.type: str = "SetVariable" + self.policy = policy self.variable_name = variable_name self.value = value + self.set_system_variable = set_system_variable class SftpLocation(DatasetLocation): @@ -54485,8 +56698,9 @@ class SftpReadSettings(StoreReadSettings): # pylint: disable=too-many-instance- :ivar wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType string). :vartype wildcard_file_name: JSON - :ivar enable_partition_discovery: Indicates whether to enable partition discovery. - :vartype enable_partition_discovery: bool + :ivar enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). 
+ :vartype enable_partition_discovery: JSON :ivar partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :vartype partition_root_path: JSON @@ -54520,7 +56734,7 @@ class SftpReadSettings(StoreReadSettings): # pylint: disable=too-many-instance- "recursive": {"key": "recursive", "type": "object"}, "wildcard_folder_path": {"key": "wildcardFolderPath", "type": "object"}, "wildcard_file_name": {"key": "wildcardFileName", "type": "object"}, - "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "bool"}, + "enable_partition_discovery": {"key": "enablePartitionDiscovery", "type": "object"}, "partition_root_path": {"key": "partitionRootPath", "type": "object"}, "file_list_path": {"key": "fileListPath", "type": "object"}, "delete_files_after_completion": {"key": "deleteFilesAfterCompletion", "type": "object"}, @@ -54538,7 +56752,7 @@ def __init__( recursive: Optional[JSON] = None, wildcard_folder_path: Optional[JSON] = None, wildcard_file_name: Optional[JSON] = None, - enable_partition_discovery: Optional[bool] = None, + enable_partition_discovery: Optional[JSON] = None, partition_root_path: Optional[JSON] = None, file_list_path: Optional[JSON] = None, delete_files_after_completion: Optional[JSON] = None, @@ -54566,8 +56780,9 @@ def __init__( :keyword wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType string). :paramtype wildcard_file_name: JSON - :keyword enable_partition_discovery: Indicates whether to enable partition discovery. - :paramtype enable_partition_discovery: bool + :keyword enable_partition_discovery: Indicates whether to enable partition discovery. Type: + boolean (or Expression with resultType boolean). + :paramtype enable_partition_discovery: JSON :keyword partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). 
:paramtype partition_root_path: JSON @@ -54640,9 +56855,8 @@ class SftpServerLinkedService(LinkedService): # pylint: disable=too-many-instan :ivar password: Password to logon the SFTP server for Basic authentication. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str :ivar private_key_path: The SSH private key file path for SshPublicKey authentication. Only valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH @@ -54681,7 +56895,7 @@ class SftpServerLinkedService(LinkedService): # pylint: disable=too-many-instan "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, "user_name": {"key": "typeProperties.userName", "type": "object"}, "password": {"key": "typeProperties.password", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, "private_key_path": {"key": "typeProperties.privateKeyPath", "type": "object"}, "private_key_content": {"key": "typeProperties.privateKeyContent", "type": "SecretBase"}, "pass_phrase": {"key": "typeProperties.passPhrase", "type": "SecretBase"}, @@ -54702,7 +56916,7 @@ def __init__( authentication_type: Optional[Union[str, "_models.SftpAuthenticationType"]] = None, user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, private_key_path: Optional[JSON] = 
None, private_key_content: Optional["_models.SecretBase"] = None, pass_phrase: Optional["_models.SecretBase"] = None, @@ -54737,9 +56951,8 @@ def __init__( :keyword password: Password to logon the SFTP server for Basic authentication. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str :keyword private_key_path: The SSH private key file path for SshPublicKey authentication. Only valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH @@ -54900,9 +57113,8 @@ class SharePointOnlineListLinkedService(LinkedService): # pylint: disable=too-m Directory. Type: string (or Expression with resultType string). Required. :vartype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -54924,7 +57136,7 @@ class SharePointOnlineListLinkedService(LinkedService): # pylint: disable=too-m "tenant_id": {"key": "typeProperties.tenantId", "type": "object"}, "service_principal_id": {"key": "typeProperties.servicePrincipalId", "type": "object"}, "service_principal_key": {"key": "typeProperties.servicePrincipalKey", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -54939,7 +57151,7 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -54970,9 +57182,8 @@ def __init__( Active Directory. Type: string (or Expression with resultType string). Required. :paramtype service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -55223,9 +57434,8 @@ class ShopifyLinkedService(LinkedService): # pylint: disable=too-many-instance- connecting over SSL. The default value is true. :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -55245,7 +57455,7 @@ class ShopifyLinkedService(LinkedService): # pylint: disable=too-many-instance- "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -55261,7 +57471,7 @@ def __init__( use_encrypted_endpoints: Optional[JSON] = None, use_host_verification: Optional[JSON] = None, use_peer_verification: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -55292,9 +57502,8 @@ def __init__( connecting over SSL. The default value is true. :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -55570,9 +57779,8 @@ class SmartsheetLinkedService(LinkedService): :ivar api_token: The api token for the Smartsheet source. Required. :vartype api_token: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. 
Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -55588,7 +57796,7 @@ class SmartsheetLinkedService(LinkedService): "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, "annotations": {"key": "annotations", "type": "[object]"}, "api_token": {"key": "typeProperties.apiToken", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -55600,7 +57808,7 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -55618,9 +57826,8 @@ def __init__( :keyword api_token: The api token for the Smartsheet source. Required. :paramtype api_token: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -55896,9 +58103,8 @@ class SnowflakeLinkedService(LinkedService): :ivar password: The Azure key vault secret reference of password in connection string. :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. 
Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -55915,7 +58121,7 @@ class SnowflakeLinkedService(LinkedService): "annotations": {"key": "annotations", "type": "[object]"}, "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, "password": {"key": "typeProperties.password", "type": "AzureKeyVaultSecretReference"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -55928,7 +58134,7 @@ def __init__( parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, password: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -55949,9 +58155,8 @@ def __init__( :keyword password: The Azure key vault secret reference of password in connection string. :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -56260,9 +58465,8 @@ class SparkLinkedService(LinkedService): # pylint: disable=too-many-instance-at the server. The default value is false. 
:vartype allow_self_signed_server_cert: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -56292,7 +58496,7 @@ class SparkLinkedService(LinkedService): # pylint: disable=too-many-instance-at "use_system_trust_store": {"key": "typeProperties.useSystemTrustStore", "type": "object"}, "allow_host_name_cn_mismatch": {"key": "typeProperties.allowHostNameCNMismatch", "type": "object"}, "allow_self_signed_server_cert": {"key": "typeProperties.allowSelfSignedServerCert", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -56316,7 +58520,7 @@ def __init__( use_system_trust_store: Optional[JSON] = None, allow_host_name_cn_mismatch: Optional[JSON] = None, allow_self_signed_server_cert: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -56371,9 +58575,8 @@ def __init__( from the server. The default value is false. :paramtype allow_self_signed_server_cert: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. 
+ :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -56629,8 +58832,8 @@ class SqlAlwaysEncryptedProperties(_serialization.Model): All required parameters must be populated in order to send to Azure. :ivar always_encrypted_akv_auth_type: Sql always encrypted AKV authentication type. Type: - string (or Expression with resultType string). Required. Known values are: "ServicePrincipal", - "ManagedIdentity", and "UserAssignedManagedIdentity". + string. Required. Known values are: "ServicePrincipal", "ManagedIdentity", and + "UserAssignedManagedIdentity". :vartype always_encrypted_akv_auth_type: str or ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedAkvAuthType :ivar service_principal_id: The client ID of the application in Azure Active Directory used for @@ -56665,8 +58868,8 @@ def __init__( ) -> None: """ :keyword always_encrypted_akv_auth_type: Sql always encrypted AKV authentication type. Type: - string (or Expression with resultType string). Required. Known values are: "ServicePrincipal", - "ManagedIdentity", and "UserAssignedManagedIdentity". + string. Required. Known values are: "ServicePrincipal", "ManagedIdentity", and + "UserAssignedManagedIdentity". :paramtype always_encrypted_akv_auth_type: str or ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedAkvAuthType :keyword service_principal_id: The client ID of the application in Azure Active Directory used @@ -56894,6 +59097,10 @@ class SqlDWSource(TabularSource): # pylint: disable=too-many-instance-attribute Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. :vartype stored_procedure_parameters: JSON + :ivar isolation_level: Specifies the transaction locking behavior for the SQL source. Allowed + values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value + is ReadCommitted. Type: string (or Expression with resultType string). 
+ :vartype isolation_level: JSON :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". :vartype partition_option: JSON @@ -56917,6 +59124,7 @@ class SqlDWSource(TabularSource): # pylint: disable=too-many-instance-attribute "sql_reader_query": {"key": "sqlReaderQuery", "type": "object"}, "sql_reader_stored_procedure_name": {"key": "sqlReaderStoredProcedureName", "type": "object"}, "stored_procedure_parameters": {"key": "storedProcedureParameters", "type": "object"}, + "isolation_level": {"key": "isolationLevel", "type": "object"}, "partition_option": {"key": "partitionOption", "type": "object"}, "partition_settings": {"key": "partitionSettings", "type": "SqlPartitionSettings"}, } @@ -56934,6 +59142,7 @@ def __init__( sql_reader_query: Optional[JSON] = None, sql_reader_stored_procedure_name: Optional[JSON] = None, stored_procedure_parameters: Optional[JSON] = None, + isolation_level: Optional[JSON] = None, partition_option: Optional[JSON] = None, partition_settings: Optional["_models.SqlPartitionSettings"] = None, **kwargs: Any @@ -56971,6 +59180,10 @@ def __init__( Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. :paramtype stored_procedure_parameters: JSON + :keyword isolation_level: Specifies the transaction locking behavior for the SQL source. + Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default + value is ReadCommitted. Type: string (or Expression with resultType string). + :paramtype isolation_level: JSON :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". 
:paramtype partition_option: JSON @@ -56991,6 +59204,7 @@ def __init__( self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters + self.isolation_level = isolation_level self.partition_option = partition_option self.partition_settings = partition_settings @@ -57237,6 +59451,10 @@ class SqlMISource(TabularSource): # pylint: disable=too-many-instance-attribute :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :vartype stored_procedure_parameters: JSON + :ivar isolation_level: Specifies the transaction locking behavior for the SQL source. Allowed + values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value + is ReadCommitted. Type: string (or Expression with resultType string). + :vartype isolation_level: JSON :ivar produce_additional_types: Which additional types to produce. :vartype produce_additional_types: JSON :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. 
@@ -57262,6 +59480,7 @@ class SqlMISource(TabularSource): # pylint: disable=too-many-instance-attribute "sql_reader_query": {"key": "sqlReaderQuery", "type": "object"}, "sql_reader_stored_procedure_name": {"key": "sqlReaderStoredProcedureName", "type": "object"}, "stored_procedure_parameters": {"key": "storedProcedureParameters", "type": "object"}, + "isolation_level": {"key": "isolationLevel", "type": "object"}, "produce_additional_types": {"key": "produceAdditionalTypes", "type": "object"}, "partition_option": {"key": "partitionOption", "type": "object"}, "partition_settings": {"key": "partitionSettings", "type": "SqlPartitionSettings"}, @@ -57280,6 +59499,7 @@ def __init__( sql_reader_query: Optional[JSON] = None, sql_reader_stored_procedure_name: Optional[JSON] = None, stored_procedure_parameters: Optional[JSON] = None, + isolation_level: Optional[JSON] = None, produce_additional_types: Optional[JSON] = None, partition_option: Optional[JSON] = None, partition_settings: Optional["_models.SqlPartitionSettings"] = None, @@ -57317,6 +59537,10 @@ def __init__( :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :paramtype stored_procedure_parameters: JSON + :keyword isolation_level: Specifies the transaction locking behavior for the SQL source. + Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default + value is ReadCommitted. Type: string (or Expression with resultType string). + :paramtype isolation_level: JSON :keyword produce_additional_types: Which additional types to produce. :paramtype produce_additional_types: JSON :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. 
@@ -57339,6 +59563,7 @@ def __init__( self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters + self.isolation_level = isolation_level self.produce_additional_types = produce_additional_types self.partition_option = partition_option self.partition_settings = partition_settings @@ -57428,9 +59653,8 @@ class SqlServerLinkedService(LinkedService): # pylint: disable=too-many-instanc :ivar password: The on-premises Windows authentication password. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str :ivar always_encrypted_settings: Sql always encrypted properties. 
:vartype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ @@ -57450,7 +59674,7 @@ class SqlServerLinkedService(LinkedService): # pylint: disable=too-many-instanc "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, "user_name": {"key": "typeProperties.userName", "type": "object"}, "password": {"key": "typeProperties.password", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, "always_encrypted_settings": { "key": "typeProperties.alwaysEncryptedSettings", "type": "SqlAlwaysEncryptedProperties", @@ -57468,7 +59692,7 @@ def __init__( annotations: Optional[List[JSON]] = None, user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, always_encrypted_settings: Optional["_models.SqlAlwaysEncryptedProperties"] = None, **kwargs: Any ) -> None: @@ -57493,9 +59717,8 @@ def __init__( :keyword password: The on-premises Windows authentication password. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str :keyword always_encrypted_settings: Sql always encrypted properties. :paramtype always_encrypted_settings: ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties @@ -57726,6 +59949,10 @@ class SqlServerSource(TabularSource): # pylint: disable=too-many-instance-attri :ivar stored_procedure_parameters: Value and type setting for stored procedure parameters. 
Example: "{Parameter1: {value: "1", type: "int"}}". :vartype stored_procedure_parameters: JSON + :ivar isolation_level: Specifies the transaction locking behavior for the SQL source. Allowed + values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value + is ReadCommitted. Type: string (or Expression with resultType string). + :vartype isolation_level: JSON :ivar produce_additional_types: Which additional types to produce. :vartype produce_additional_types: JSON :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. @@ -57751,6 +59978,7 @@ class SqlServerSource(TabularSource): # pylint: disable=too-many-instance-attri "sql_reader_query": {"key": "sqlReaderQuery", "type": "object"}, "sql_reader_stored_procedure_name": {"key": "sqlReaderStoredProcedureName", "type": "object"}, "stored_procedure_parameters": {"key": "storedProcedureParameters", "type": "object"}, + "isolation_level": {"key": "isolationLevel", "type": "object"}, "produce_additional_types": {"key": "produceAdditionalTypes", "type": "object"}, "partition_option": {"key": "partitionOption", "type": "object"}, "partition_settings": {"key": "partitionSettings", "type": "SqlPartitionSettings"}, @@ -57769,6 +59997,7 @@ def __init__( sql_reader_query: Optional[JSON] = None, sql_reader_stored_procedure_name: Optional[JSON] = None, stored_procedure_parameters: Optional[JSON] = None, + isolation_level: Optional[JSON] = None, produce_additional_types: Optional[JSON] = None, partition_option: Optional[JSON] = None, partition_settings: Optional["_models.SqlPartitionSettings"] = None, @@ -57806,6 +60035,10 @@ def __init__( :keyword stored_procedure_parameters: Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :paramtype stored_procedure_parameters: JSON + :keyword isolation_level: Specifies the transaction locking behavior for the SQL source. 
+ Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default + value is ReadCommitted. Type: string (or Expression with resultType string). + :paramtype isolation_level: JSON :keyword produce_additional_types: Which additional types to produce. :paramtype produce_additional_types: JSON :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. @@ -57828,12 +60061,13 @@ def __init__( self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters + self.isolation_level = isolation_level self.produce_additional_types = produce_additional_types self.partition_option = partition_option self.partition_settings = partition_settings -class SqlServerStoredProcedureActivity(ExecutionActivity): +class SqlServerStoredProcedureActivity(ExecutionActivity): # pylint: disable=too-many-instance-attributes """SQL stored procedure activity type. All required parameters must be populated in order to send to Azure. @@ -57847,6 +60081,13 @@ class SqlServerStoredProcedureActivity(ExecutionActivity): :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. 
:vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. @@ -57874,6 +60115,8 @@ class SqlServerStoredProcedureActivity(ExecutionActivity): "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -57889,6 +60132,8 @@ def __init__( stored_procedure_name: JSON, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -57904,6 +60149,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. 
:paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -57923,6 +60175,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -58443,7 +60697,7 @@ class SquareLinkedService(LinkedService): # pylint: disable=too-many-instance-a :ivar connection_properties: Properties used to connect to Square. It is mutually exclusive with any other properties in the linked service. Type: object. :vartype connection_properties: JSON - :ivar host: The URL of the Square instance. (i.e. mystore.mysquare.com). + :ivar host: The URL of the Square instance. (i.e. mystore.mysquare.com). :vartype host: JSON :ivar client_id: The client ID associated with your Square application. :vartype client_id: JSON @@ -58463,9 +60717,8 @@ class SquareLinkedService(LinkedService): # pylint: disable=too-many-instance-a connecting over SSL. The default value is true. :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -58487,7 +60740,7 @@ class SquareLinkedService(LinkedService): # pylint: disable=too-many-instance-a "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -58506,7 +60759,7 @@ def __init__( use_encrypted_endpoints: Optional[JSON] = None, use_host_verification: Optional[JSON] = None, use_peer_verification: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -58524,7 +60777,7 @@ def __init__( :keyword connection_properties: Properties used to connect to Square. It is mutually exclusive with any other properties in the linked service. Type: object. :paramtype connection_properties: JSON - :keyword host: The URL of the Square instance. (i.e. mystore.mysquare.com). + :keyword host: The URL of the Square instance. (i.e. mystore.mysquare.com). :paramtype host: JSON :keyword client_id: The client ID associated with your Square application. :paramtype client_id: JSON @@ -58544,9 +60797,8 @@ def __init__( connecting over SSL. The default value is true. :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. 
+ :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -58777,9 +61029,11 @@ class SSISAccessCredential(_serialization.Model): All required parameters must be populated in order to send to Azure. - :ivar domain: Domain for windows authentication. Required. + :ivar domain: Domain for windows authentication. Type: string (or Expression with resultType + string). Required. :vartype domain: JSON - :ivar user_name: UseName for windows authentication. Required. + :ivar user_name: UseName for windows authentication. Type: string (or Expression with + resultType string). Required. :vartype user_name: JSON :ivar password: Password for windows authentication. Required. :vartype password: ~azure.mgmt.datafactory.models.SecretBase @@ -58799,9 +61053,11 @@ class SSISAccessCredential(_serialization.Model): def __init__(self, *, domain: JSON, user_name: JSON, password: "_models.SecretBase", **kwargs: Any) -> None: """ - :keyword domain: Domain for windows authentication. Required. + :keyword domain: Domain for windows authentication. Type: string (or Expression with resultType + string). Required. :paramtype domain: JSON - :keyword user_name: UseName for windows authentication. Required. + :keyword user_name: UseName for windows authentication. Type: string (or Expression with + resultType string). Required. :paramtype user_name: JSON :keyword password: Password for windows authentication. Required. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase @@ -59043,9 +61299,11 @@ class SSISExecutionCredential(_serialization.Model): All required parameters must be populated in order to send to Azure. - :ivar domain: Domain for windows authentication. Required. + :ivar domain: Domain for windows authentication. Type: string (or Expression with resultType + string). Required. :vartype domain: JSON - :ivar user_name: UseName for windows authentication. Required. + :ivar user_name: UseName for windows authentication. 
Type: string (or Expression with + resultType string). Required. :vartype user_name: JSON :ivar password: Password for windows authentication. Required. :vartype password: ~azure.mgmt.datafactory.models.SecureString @@ -59065,9 +61323,11 @@ class SSISExecutionCredential(_serialization.Model): def __init__(self, *, domain: JSON, user_name: JSON, password: "_models.SecureString", **kwargs: Any) -> None: """ - :keyword domain: Domain for windows authentication. Required. + :keyword domain: Domain for windows authentication. Type: string (or Expression with resultType + string). Required. :paramtype domain: JSON - :keyword user_name: UseName for windows authentication. Required. + :keyword user_name: UseName for windows authentication. Type: string (or Expression with + resultType string). Required. :paramtype user_name: JSON :keyword password: Password for windows authentication. Required. :paramtype password: ~azure.mgmt.datafactory.models.SecureString @@ -59837,7 +62097,7 @@ def __init__( self.type = type -class SwitchActivity(ControlActivity): +class SwitchActivity(ControlActivity): # pylint: disable=too-many-instance-attributes """This activity evaluates an expression and executes activities under the cases property that correspond to the expression evaluation expected in the equals property. @@ -59852,6 +62112,13 @@ class SwitchActivity(ControlActivity): :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". 
+ :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. @@ -59879,6 +62146,8 @@ class SwitchActivity(ControlActivity): "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "on": {"key": "typeProperties.on", "type": "Expression"}, @@ -59893,6 +62162,8 @@ def __init__( on: "_models.Expression", additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, cases: Optional[List["_models.SwitchCase"]] = None, @@ -59907,6 +62178,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". 
+ :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -59926,6 +62204,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, **kwargs @@ -59999,9 +62279,8 @@ class SybaseLinkedService(LinkedService): # pylint: disable=too-many-instance-a :ivar password: Password for authentication. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -60023,7 +62302,7 @@ class SybaseLinkedService(LinkedService): # pylint: disable=too-many-instance-a "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, "username": {"key": "typeProperties.username", "type": "object"}, "password": {"key": "typeProperties.password", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -60040,7 +62319,7 @@ def __init__( authentication_type: Optional[Union[str, "_models.SybaseAuthenticationType"]] = None, username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -60073,9 +62352,8 @@ def __init__( :keyword password: Password for authentication. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -60312,6 +62590,13 @@ class SynapseNotebookActivity(ExecutionActivity): # pylint: disable=too-many-in :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". 
+ :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. @@ -60339,8 +62624,17 @@ class SynapseNotebookActivity(ExecutionActivity): # pylint: disable=too-many-in of the notebook you provide. Type: string (or Expression with resultType string). :vartype driver_size: JSON :ivar num_executors: Number of executors to launch for this session, which will override the - 'numExecutors' of the notebook you provide. - :vartype num_executors: int + 'numExecutors' of the notebook you provide. Type: integer (or Expression with resultType + integer). + :vartype num_executors: JSON + :ivar configuration_type: The type of the spark config. Known values are: "Default", + "Customized", and "Artifact". + :vartype configuration_type: str or ~azure.mgmt.datafactory.models.ConfigurationType + :ivar target_spark_configuration: The spark configuration of the spark job. + :vartype target_spark_configuration: + ~azure.mgmt.datafactory.models.SparkConfigurationParametrizationReference + :ivar spark_config: Spark configuration property. 
+ :vartype spark_config: dict[str, JSON] """ _validation = { @@ -60354,6 +62648,8 @@ class SynapseNotebookActivity(ExecutionActivity): # pylint: disable=too-many-in "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -60364,7 +62660,13 @@ class SynapseNotebookActivity(ExecutionActivity): # pylint: disable=too-many-in "executor_size": {"key": "typeProperties.executorSize", "type": "object"}, "conf": {"key": "typeProperties.conf", "type": "object"}, "driver_size": {"key": "typeProperties.driverSize", "type": "object"}, - "num_executors": {"key": "typeProperties.numExecutors", "type": "int"}, + "num_executors": {"key": "typeProperties.numExecutors", "type": "object"}, + "configuration_type": {"key": "typeProperties.configurationType", "type": "str"}, + "target_spark_configuration": { + "key": "typeProperties.targetSparkConfiguration", + "type": "SparkConfigurationParametrizationReference", + }, + "spark_config": {"key": "typeProperties.sparkConfig", "type": "{object}"}, } def __init__( @@ -60374,6 +62676,8 @@ def __init__( notebook: "_models.SynapseNotebookReference", additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -60383,7 +62687,10 @@ def __init__( 
executor_size: Optional[JSON] = None, conf: Optional[JSON] = None, driver_size: Optional[JSON] = None, - num_executors: Optional[int] = None, + num_executors: Optional[JSON] = None, + configuration_type: Optional[Union[str, "_models.ConfigurationType"]] = None, + target_spark_configuration: Optional["_models.SparkConfigurationParametrizationReference"] = None, + spark_config: Optional[Dict[str, JSON]] = None, **kwargs: Any ) -> None: """ @@ -60394,6 +62701,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -60422,13 +62736,24 @@ def __init__( string). :paramtype driver_size: JSON :keyword num_executors: Number of executors to launch for this session, which will override the - 'numExecutors' of the notebook you provide. - :paramtype num_executors: int + 'numExecutors' of the notebook you provide. Type: integer (or Expression with resultType + integer). + :paramtype num_executors: JSON + :keyword configuration_type: The type of the spark config. Known values are: "Default", + "Customized", and "Artifact". 
+ :paramtype configuration_type: str or ~azure.mgmt.datafactory.models.ConfigurationType + :keyword target_spark_configuration: The spark configuration of the spark job. + :paramtype target_spark_configuration: + ~azure.mgmt.datafactory.models.SparkConfigurationParametrizationReference + :keyword spark_config: Spark configuration property. + :paramtype spark_config: dict[str, JSON] """ super().__init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -60443,6 +62768,9 @@ def __init__( self.conf = conf self.driver_size = driver_size self.num_executors = num_executors + self.configuration_type = configuration_type + self.target_spark_configuration = target_spark_configuration + self.spark_config = spark_config class SynapseNotebookReference(_serialization.Model): @@ -60496,6 +62824,13 @@ class SynapseSparkJobDefinitionActivity(ExecutionActivity): # pylint: disable=t :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -60572,6 +62907,8 @@ class SynapseSparkJobDefinitionActivity(ExecutionActivity): # pylint: disable=t "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -60607,6 +62944,8 @@ def __init__( # pylint: disable=too-many-locals spark_job: "_models.SynapseSparkJobReference", additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -60636,6 +62975,13 @@ def __init__( # pylint: disable=too-many-locals :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. 
:paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -60705,6 +63051,8 @@ def __init__( # pylint: disable=too-many-locals additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -60996,9 +63344,8 @@ class TeamDeskLinkedService(LinkedService): # pylint: disable=too-many-instance :ivar api_token: The api token for the TeamDesk source. :vartype api_token: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -61019,7 +63366,7 @@ class TeamDeskLinkedService(LinkedService): # pylint: disable=too-many-instance "user_name": {"key": "typeProperties.userName", "type": "object"}, "password": {"key": "typeProperties.password", "type": "SecretBase"}, "api_token": {"key": "typeProperties.apiToken", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -61035,7 +63382,7 @@ def __init__( user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, api_token: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -61065,9 +63412,8 @@ def __init__( :keyword api_token: The api token for the TeamDesk source. 
:paramtype api_token: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -61118,9 +63464,8 @@ class TeradataLinkedService(LinkedService): # pylint: disable=too-many-instance :ivar password: Password for authentication. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -61139,7 +63484,7 @@ class TeradataLinkedService(LinkedService): # pylint: disable=too-many-instance "authentication_type": {"key": "typeProperties.authenticationType", "type": "str"}, "username": {"key": "typeProperties.username", "type": "object"}, "password": {"key": "typeProperties.password", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -61155,7 +63500,7 @@ def __init__( authentication_type: Optional[Union[str, "_models.TeradataAuthenticationType"]] = None, username: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -61186,9 +63531,8 @@ def __init__( :keyword password: Password for authentication. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -62229,7 +64573,8 @@ class TwilioLinkedService(LinkedService): :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. :vartype annotations: list[JSON] - :ivar user_name: The Account SID of Twilio service. Required. + :ivar user_name: The Account SID of Twilio service. Type: string (or Expression with resultType + string). Required. 
:vartype user_name: JSON :ivar password: The auth token of Twilio service. Required. :vartype password: ~azure.mgmt.datafactory.models.SecretBase @@ -62276,7 +64621,8 @@ def __init__( :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. :paramtype annotations: list[JSON] - :keyword user_name: The Account SID of Twilio service. Required. + :keyword user_name: The Account SID of Twilio service. Type: string (or Expression with + resultType string). Required. :paramtype user_name: JSON :keyword password: The auth token of Twilio service. Required. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase @@ -62366,7 +64712,7 @@ def __init__( self.culture = culture -class UntilActivity(ControlActivity): +class UntilActivity(ControlActivity): # pylint: disable=too-many-instance-attributes """This activity executes inner activities until the specified boolean expression results to true or timeout is reached, whichever is earlier. @@ -62381,6 +64727,13 @@ class UntilActivity(ControlActivity): :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. 
@@ -62391,8 +64744,7 @@ class UntilActivity(ControlActivity): :ivar timeout: Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: - ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). Type: string (or Expression with - resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :vartype timeout: JSON :ivar activities: List of activities to execute. Required. :vartype activities: list[~azure.mgmt.datafactory.models.Activity] @@ -62410,6 +64762,8 @@ class UntilActivity(ControlActivity): "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "expression": {"key": "typeProperties.expression", "type": "Expression"}, @@ -62425,6 +64779,8 @@ def __init__( activities: List["_models.Activity"], additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, timeout: Optional[JSON] = None, @@ -62438,6 +64794,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". 
+ :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -62448,8 +64811,7 @@ def __init__( :keyword timeout: Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: - ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). Type: string (or Expression with - resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :paramtype timeout: JSON :keyword activities: List of activities to execute. Required. :paramtype activities: list[~azure.mgmt.datafactory.models.Activity] @@ -62458,6 +64820,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, **kwargs @@ -62641,6 +65005,13 @@ class ValidationActivity(ControlActivity): # pylint: disable=too-many-instance- :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". 
+ :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. @@ -62675,6 +65046,8 @@ class ValidationActivity(ControlActivity): # pylint: disable=too-many-instance- "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "timeout": {"key": "typeProperties.timeout", "type": "object"}, @@ -62691,6 +65064,8 @@ def __init__( dataset: "_models.DatasetReference", additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, timeout: Optional[JSON] = None, @@ -62707,6 +65082,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". 
+ :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -62733,6 +65115,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, **kwargs @@ -62803,9 +65187,8 @@ class VerticaLinkedService(LinkedService): :ivar pwd: The Azure key vault secret reference of password in connection string. :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -62821,7 +65204,7 @@ class VerticaLinkedService(LinkedService): "annotations": {"key": "annotations", "type": "[object]"}, "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, "pwd": {"key": "typeProperties.pwd", "type": "AzureKeyVaultSecretReference"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -62834,7 +65217,7 @@ def __init__( annotations: Optional[List[JSON]] = None, connection_string: Optional[JSON] = None, pwd: Optional["_models.AzureKeyVaultSecretReference"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -62855,9 +65238,8 @@ def __init__( :keyword pwd: The Azure key vault secret reference of password in connection string. :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -63111,11 +65493,19 @@ class WaitActivity(ControlActivity): :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. 
+ This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :ivar wait_time_in_seconds: Duration in seconds. Required. + :ivar wait_time_in_seconds: Duration in seconds. Type: integer (or Expression with resultType + integer). Required. :vartype wait_time_in_seconds: JSON """ @@ -63130,6 +65520,8 @@ class WaitActivity(ControlActivity): "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "wait_time_in_seconds": {"key": "typeProperties.waitTimeInSeconds", "type": "object"}, @@ -63142,6 +65534,8 @@ def __init__( wait_time_in_seconds: JSON, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, **kwargs: Any @@ -63154,17 +65548,27 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. 
Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :keyword wait_time_in_seconds: Duration in seconds. Required. + :keyword wait_time_in_seconds: Duration in seconds. Type: integer (or Expression with + resultType integer). Required. :paramtype wait_time_in_seconds: JSON """ super().__init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, **kwargs @@ -63187,6 +65591,13 @@ class WebActivity(ExecutionActivity): # pylint: disable=too-many-instance-attri :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". 
+ :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. @@ -63232,6 +65643,8 @@ class WebActivity(ExecutionActivity): # pylint: disable=too-many-instance-attri "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, @@ -63255,6 +65668,8 @@ def __init__( url: JSON, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, @@ -63276,6 +65691,13 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". 
+ :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. @@ -63312,6 +65734,8 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, @@ -63597,10 +66021,19 @@ class WebHookActivity(ControlActivity): # pylint: disable=too-many-instance-att :vartype type: str :ivar description: Activity description. :vartype description: str + :ivar state: Activity state. This is an optional property and if not provided, the state will + be Active by default. Known values are: "Active" and "Inactive". + :vartype state: str or ~azure.mgmt.datafactory.models.ActivityState + :ivar on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". + :vartype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :ivar depends_on: Activity depends on condition. :vartype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :ivar user_properties: Activity user properties. :vartype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :ivar policy: Activity policy. + :vartype policy: ~azure.mgmt.datafactory.models.SecureInputOutputPolicy :ivar method: Rest API method for target endpoint. Required. "POST" :vartype method: str or ~azure.mgmt.datafactory.models.WebHookActivityMethod :ivar url: WebHook activity target endpoint and path. 
Type: string (or Expression with @@ -63638,8 +66071,11 @@ class WebHookActivity(ControlActivity): # pylint: disable=too-many-instance-att "name": {"key": "name", "type": "str"}, "type": {"key": "type", "type": "str"}, "description": {"key": "description", "type": "str"}, + "state": {"key": "state", "type": "str"}, + "on_inactive_mark_as": {"key": "onInactiveMarkAs", "type": "str"}, "depends_on": {"key": "dependsOn", "type": "[ActivityDependency]"}, "user_properties": {"key": "userProperties", "type": "[UserProperty]"}, + "policy": {"key": "policy", "type": "SecureInputOutputPolicy"}, "method": {"key": "typeProperties.method", "type": "str"}, "url": {"key": "typeProperties.url", "type": "object"}, "timeout": {"key": "typeProperties.timeout", "type": "str"}, @@ -63657,8 +66093,11 @@ def __init__( url: JSON, additional_properties: Optional[Dict[str, JSON]] = None, description: Optional[str] = None, + state: Optional[Union[str, "_models.ActivityState"]] = None, + on_inactive_mark_as: Optional[Union[str, "_models.ActivityOnInactiveMarkAs"]] = None, depends_on: Optional[List["_models.ActivityDependency"]] = None, user_properties: Optional[List["_models.UserProperty"]] = None, + policy: Optional["_models.SecureInputOutputPolicy"] = None, timeout: Optional[str] = None, headers: Optional[JSON] = None, body: Optional[JSON] = None, @@ -63674,10 +66113,19 @@ def __init__( :paramtype name: str :keyword description: Activity description. :paramtype description: str + :keyword state: Activity state. This is an optional property and if not provided, the state + will be Active by default. Known values are: "Active" and "Inactive". + :paramtype state: str or ~azure.mgmt.datafactory.models.ActivityState + :keyword on_inactive_mark_as: Status result of the activity when the state is set to Inactive. + This is an optional property and if not provided when the activity is inactive, the status will + be Succeeded by default. Known values are: "Succeeded", "Failed", and "Skipped". 
+ :paramtype on_inactive_mark_as: str or ~azure.mgmt.datafactory.models.ActivityOnInactiveMarkAs :keyword depends_on: Activity depends on condition. :paramtype depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :keyword user_properties: Activity user properties. :paramtype user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :keyword policy: Activity policy. + :paramtype policy: ~azure.mgmt.datafactory.models.SecureInputOutputPolicy :keyword method: Rest API method for target endpoint. Required. "POST" :paramtype method: str or ~azure.mgmt.datafactory.models.WebHookActivityMethod :keyword url: WebHook activity target endpoint and path. Type: string (or Expression with @@ -63706,11 +66154,14 @@ def __init__( additional_properties=additional_properties, name=name, description=description, + state=state, + on_inactive_mark_as=on_inactive_mark_as, depends_on=depends_on, user_properties=user_properties, **kwargs ) self.type: str = "WebHook" + self.policy = policy self.method = method self.url = url self.timeout = timeout @@ -64102,9 +66553,8 @@ class XeroLinkedService(LinkedService): # pylint: disable=too-many-instance-att connecting over SSL. The default value is true. :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. 
+ :vartype encrypted_credential: str """ _validation = { @@ -64125,7 +66575,7 @@ class XeroLinkedService(LinkedService): # pylint: disable=too-many-instance-att "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -64143,7 +66593,7 @@ def __init__( use_encrypted_endpoints: Optional[JSON] = None, use_host_verification: Optional[JSON] = None, use_peer_verification: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -64181,9 +66631,8 @@ def __init__( connecting over SSL. The default value is true. :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -64753,9 +67202,8 @@ class ZendeskLinkedService(LinkedService): # pylint: disable=too-many-instance- :ivar api_token: The api token for the Zendesk source. :vartype api_token: ~azure.mgmt.datafactory.models.SecretBase :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -64776,7 +67224,7 @@ class ZendeskLinkedService(LinkedService): # pylint: disable=too-many-instance- "user_name": {"key": "typeProperties.userName", "type": "object"}, "password": {"key": "typeProperties.password", "type": "SecretBase"}, "api_token": {"key": "typeProperties.apiToken", "type": "SecretBase"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -64792,7 +67240,7 @@ def __init__( user_name: Optional[JSON] = None, password: Optional["_models.SecretBase"] = None, api_token: Optional["_models.SecretBase"] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -64821,9 +67269,8 @@ def __init__( :keyword api_token: The api token for the Zendesk source. :paramtype api_token: ~azure.mgmt.datafactory.models.SecretBase :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, @@ -64923,9 +67370,8 @@ class ZohoLinkedService(LinkedService): # pylint: disable=too-many-instance-att connecting over SSL. The default value is true. :vartype use_peer_verification: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :vartype encrypted_credential: JSON + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str """ _validation = { @@ -64945,7 +67391,7 @@ class ZohoLinkedService(LinkedService): # pylint: disable=too-many-instance-att "use_encrypted_endpoints": {"key": "typeProperties.useEncryptedEndpoints", "type": "object"}, "use_host_verification": {"key": "typeProperties.useHostVerification", "type": "object"}, "use_peer_verification": {"key": "typeProperties.usePeerVerification", "type": "object"}, - "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } def __init__( @@ -64962,7 +67408,7 @@ def __init__( use_encrypted_endpoints: Optional[JSON] = None, use_host_verification: Optional[JSON] = None, use_peer_verification: Optional[JSON] = None, - encrypted_credential: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: """ @@ -64995,9 +67441,8 @@ def __init__( connecting over SSL. The default value is true. :paramtype use_peer_verification: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials - are encrypted using the integration runtime credential manager. Type: string (or Expression - with resultType string). - :paramtype encrypted_credential: JSON + are encrypted using the integration runtime credential manager. Type: string. 
+ :paramtype encrypted_credential: str """ super().__init__( additional_properties=additional_properties, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py index b4c9fa93e0a9..668131aae0e5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py @@ -28,6 +28,7 @@ from ._private_endpoint_connection_operations import PrivateEndpointConnectionOperations from ._private_link_resources_operations import PrivateLinkResourcesOperations from ._global_parameters_operations import GlobalParametersOperations +from ._change_data_capture_operations import ChangeDataCaptureOperations from ._patch import __all__ as _patch_all from ._patch import * # pylint: disable=unused-wildcard-import @@ -56,6 +57,7 @@ "PrivateEndpointConnectionOperations", "PrivateLinkResourcesOperations", "GlobalParametersOperations", + "ChangeDataCaptureOperations", ] __all__.extend([p for p in _patch_all if p not in __all__]) _patch_sdk() diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_activity_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_activity_runs_operations.py index 74ceb49a10e0..436dc42fe167 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_activity_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_activity_runs_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload from azure.core.exceptions import ( @@ -26,12 +26,8 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -45,7 +41,7 @@ def build_query_by_pipeline_run_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -70,7 +66,7 @@ def build_query_by_pipeline_run_request( "runId": _SERIALIZER.url("run_id", run_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -201,16 +197,14 @@ def query_by_pipeline_run( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ActivityRunsQueryResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(filter_parameters, (IO, bytes)): + if isinstance(filter_parameters, (IOBase, bytes)): _content = filter_parameters else: _json = self._serialize.body(filter_parameters, "RunFilterParameters") @@ -231,8 +225,9 @@ def query_by_pipeline_run( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_change_data_capture_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_change_data_capture_operations.py new file mode 100644 index 000000000000..1cfabcc27d5d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_change_data_capture_operations.py @@ -0,0 +1,986 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from io import IOBase +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request + +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_by_factory_request( + resource_group_name: str, factory_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": 
_SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, + factory_name: str, + change_data_capture_name: str, + subscription_id: str, + *, + if_match: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "changeDataCaptureName": _SERIALIZER.url( + "change_data_capture_name", + change_data_capture_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), + } + + _url: str = 
_url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, + factory_name: str, + change_data_capture_name: str, + subscription_id: str, + *, + if_none_match: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "changeDataCaptureName": _SERIALIZER.url( + "change_data_capture_name", + change_data_capture_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + 
# Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, factory_name: str, change_data_capture_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "changeDataCaptureName": _SERIALIZER.url( + "change_data_capture_name", + change_data_capture_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + 
return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_start_request( + resource_group_name: str, factory_name: str, change_data_capture_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}/start", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "changeDataCaptureName": _SERIALIZER.url( + "change_data_capture_name", + change_data_capture_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_stop_request( + resource_group_name: str, factory_name: str, change_data_capture_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) 
or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}/stop", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "changeDataCaptureName": _SERIALIZER.url( + "change_data_capture_name", + change_data_capture_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_status_request( + resource_group_name: str, factory_name: str, change_data_capture_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}/status", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "factoryName": _SERIALIZER.url( + "factory_name", + factory_name, + "str", + max_length=63, + min_length=3, + pattern=r"^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + ), + "changeDataCaptureName": _SERIALIZER.url( + "change_data_capture_name", + change_data_capture_name, + "str", + max_length=260, + min_length=1, + pattern=r"^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$", + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class ChangeDataCaptureOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.datafactory.DataFactoryManagementClient`'s + :attr:`change_data_capture` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list_by_factory( + self, resource_group_name: str, factory_name: str, **kwargs: Any + ) -> Iterable["_models.ChangeDataCaptureResource"]: + """Lists all resources of type change data capture. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ChangeDataCaptureResource or the result of + cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.ChangeDataCaptureResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ChangeDataCaptureListResponse] = kwargs.pop("cls", None) + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_by_factory_request( + resource_group_name=resource_group_name, + factory_name=factory_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_by_factory.metadata["url"], + headers=_headers, + 
params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ChangeDataCaptureListResponse", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list_by_factory.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs" + } + + @overload + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + change_data_capture_name: str, + change_data_capture: _models.ChangeDataCaptureResource, + if_match: Optional[str] = None, + *, + content_type: str 
= "application/json", + **kwargs: Any + ) -> _models.ChangeDataCaptureResource: + """Creates or updates a change data capture resource. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param change_data_capture_name: The change data capture name. Required. + :type change_data_capture_name: str + :param change_data_capture: Change data capture resource definition. Required. + :type change_data_capture: ~azure.mgmt.datafactory.models.ChangeDataCaptureResource + :param if_match: ETag of the change data capture entity. Should only be specified for update, + for which it should match existing entity or can be * for unconditional update. Default value + is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ChangeDataCaptureResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ChangeDataCaptureResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + change_data_capture_name: str, + change_data_capture: IO, + if_match: Optional[str] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ChangeDataCaptureResource: + """Creates or updates a change data capture resource. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param change_data_capture_name: The change data capture name. Required. + :type change_data_capture_name: str + :param change_data_capture: Change data capture resource definition. Required. 
+ :type change_data_capture: IO + :param if_match: ETag of the change data capture entity. Should only be specified for update, + for which it should match existing entity or can be * for unconditional update. Default value + is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ChangeDataCaptureResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ChangeDataCaptureResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + change_data_capture_name: str, + change_data_capture: Union[_models.ChangeDataCaptureResource, IO], + if_match: Optional[str] = None, + **kwargs: Any + ) -> _models.ChangeDataCaptureResource: + """Creates or updates a change data capture resource. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param change_data_capture_name: The change data capture name. Required. + :type change_data_capture_name: str + :param change_data_capture: Change data capture resource definition. Is either a + ChangeDataCaptureResource type or a IO type. Required. + :type change_data_capture: ~azure.mgmt.datafactory.models.ChangeDataCaptureResource or IO + :param if_match: ETag of the change data capture entity. Should only be specified for update, + for which it should match existing entity or can be * for unconditional update. Default value + is None. + :type if_match: str + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ChangeDataCaptureResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ChangeDataCaptureResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ChangeDataCaptureResource] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(change_data_capture, (IOBase, bytes)): + _content = change_data_capture + else: + _json = self._serialize.body(change_data_capture, "ChangeDataCaptureResource") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + factory_name=factory_name, + change_data_capture_name=change_data_capture_name, + subscription_id=self._config.subscription_id, + if_match=if_match, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ChangeDataCaptureResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}" + } + + @distributed_trace + def get( + self, + resource_group_name: str, + factory_name: str, + change_data_capture_name: str, + if_none_match: Optional[str] = None, + **kwargs: Any + ) -> _models.ChangeDataCaptureResource: + """Gets a change data capture. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param change_data_capture_name: The change data capture name. Required. + :type change_data_capture_name: str + :param if_none_match: ETag of the change data capture entity. Should only be specified for get. + If the ETag matches the existing entity tag, or if * was provided, then no content will be + returned. Default value is None. 
+ :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ChangeDataCaptureResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.ChangeDataCaptureResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ChangeDataCaptureResource] = kwargs.pop("cls", None) + + request = build_get_request( + resource_group_name=resource_group_name, + factory_name=factory_name, + change_data_capture_name=change_data_capture_name, + subscription_id=self._config.subscription_id, + if_none_match=if_none_match, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("ChangeDataCaptureResource", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}" + } + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, factory_name: str, change_data_capture_name: str, **kwargs: Any + ) -> None: + """Deletes a change data capture. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param change_data_capture_name: The change data capture name. Required. + :type change_data_capture_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( + resource_group_name=resource_group_name, + factory_name=factory_name, + change_data_capture_name=change_data_capture_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if 
response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}" + } + + @distributed_trace + def start( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, factory_name: str, change_data_capture_name: str, **kwargs: Any + ) -> None: + """Starts a change data capture. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param change_data_capture_name: The change data capture name. Required. + :type change_data_capture_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_start_request( + resource_group_name=resource_group_name, + factory_name=factory_name, + change_data_capture_name=change_data_capture_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.start.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) 
+ request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + start.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}/start" + } + + @distributed_trace + def stop( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, factory_name: str, change_data_capture_name: str, **kwargs: Any + ) -> None: + """Stops a change data capture. + + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param change_data_capture_name: The change data capture name. Required. 
+ :type change_data_capture_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_stop_request( + resource_group_name=resource_group_name, + factory_name=factory_name, + change_data_capture_name=change_data_capture_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.stop.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + stop.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}/stop" + } + + @distributed_trace + def status(self, resource_group_name: str, factory_name: str, change_data_capture_name: str, **kwargs: Any) -> str: + """Gets the current status for the change data capture resource. 
+ + :param resource_group_name: The resource group name. Required. + :type resource_group_name: str + :param factory_name: The factory name. Required. + :type factory_name: str + :param change_data_capture_name: The change data capture name. Required. + :type change_data_capture_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: str or the result of cls(response) + :rtype: str + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[str] = kwargs.pop("cls", None) + + request = build_status_request( + resource_group_name=resource_group_name, + factory_name=factory_name, + change_data_capture_name=change_data_capture_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.status.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize("str", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + status.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}/status" + } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_credential_operations_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_credential_operations_operations.py index 3a4b3074ace4..22909e035633 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_credential_operations_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_credential_operations_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload import urllib.parse @@ -28,12 +28,8 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -47,7 +43,7 @@ def build_list_by_factory_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -70,7 +66,7 @@ def build_list_by_factory_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -93,7 +89,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -125,7 +121,7 @@ def build_create_or_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct 
parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -152,7 +148,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -183,7 +179,7 @@ def build_get_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -202,7 +198,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -233,7 +229,7 @@ def build_delete_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -283,9 +279,7 @@ def list_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.CredentialListResponse] = kwargs.pop("cls", 
None) error_map = { @@ -339,8 +333,9 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -467,16 +462,14 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ManagedIdentityCredentialResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(credential, (IO, bytes)): + if isinstance(credential, (IOBase, bytes)): _content = credential else: _json = self._serialize.body(credential, "ManagedIdentityCredentialResource") @@ -498,8 +491,9 @@ def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -556,9 +550,7 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: 
ClsType[Optional[_models.ManagedIdentityCredentialResource]] = kwargs.pop("cls", None) request = build_get_request( @@ -575,8 +567,9 @@ def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -626,9 +619,7 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -644,8 +635,9 @@ def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py index 258c573a4873..9f8c65c39ec3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload import urllib.parse @@ -30,12 +30,8 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -49,7 +45,7 @@ def build_create_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -73,7 +69,7 @@ def build_create_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -92,7 +88,7 @@ def build_query_by_factory_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -115,7 +111,7 @@ def build_query_by_factory_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -132,7 +128,7 @@ def build_add_data_flow_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -156,7 +152,7 @@ def build_add_data_flow_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -175,7 +171,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -199,7 +195,7 @@ def build_delete_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters 
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -218,7 +214,7 @@ def build_execute_command_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -242,7 +238,7 @@ def build_execute_command_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -292,16 +288,14 @@ def _create_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[Optional[_models.CreateDataFlowDebugSessionResponse]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(request, (IO, bytes)): + if isinstance(request, (IOBase, bytes)): _content = request else: _json = self._serialize.body(request, "CreateDataFlowDebugSessionRequest") @@ -321,8 +315,9 @@ def _create_initial( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: 
disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -457,9 +452,7 @@ def begin_create( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.CreateDataFlowDebugSessionResponse] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) @@ -523,9 +516,7 @@ def query_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.QueryDataFlowDebugSessionsResponse] = kwargs.pop("cls", None) error_map = { @@ -579,8 +570,9 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -686,16 +678,14 @@ def add_data_flow( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.AddDataFlowToDebugSessionResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(request, (IO, bytes)): + if isinstance(request, (IOBase, bytes)): _content = request else: _json = self._serialize.body(request, "DataFlowDebugPackage") @@ -715,8 +705,9 @@ def add_data_flow( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -826,16 +817,14 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[None] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(request, (IO, bytes)): + if isinstance(request, (IOBase, bytes)): _content = request else: _json = self._serialize.body(request, "DeleteDataFlowDebugSessionRequest") @@ -855,8 +844,9 @@ def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, 
**kwargs ) response = pipeline_response.http_response @@ -890,16 +880,14 @@ def _execute_command_initial( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[Optional[_models.DataFlowDebugCommandResponse]] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(request, (IO, bytes)): + if isinstance(request, (IOBase, bytes)): _content = request else: _json = self._serialize.body(request, "DataFlowDebugCommandRequest") @@ -919,8 +907,9 @@ def _execute_command_initial( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1055,9 +1044,7 @@ def begin_execute_command( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DataFlowDebugCommandResponse] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flows_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flows_operations.py index d3c93a8c7dd1..de5c52eec5d2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flows_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flows_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload import urllib.parse @@ -28,12 +28,8 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -53,7 +49,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -85,7 +81,7 @@ def build_create_or_update_request( ), } - _url: str = _format_url_section(_url, 
**path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -112,7 +108,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -143,7 +139,7 @@ def build_get_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -162,7 +158,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -193,7 +189,7 @@ def build_delete_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -210,7 +206,7 @@ def build_list_by_factory_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = 
kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -233,7 +229,7 @@ def build_list_by_factory_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -374,16 +370,14 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DataFlowResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(data_flow, (IO, bytes)): + if isinstance(data_flow, (IOBase, bytes)): _content = data_flow else: _json = self._serialize.body(data_flow, "DataFlowResource") @@ -405,8 +399,9 @@ def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -463,9 +458,7 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: 
ClsType[_models.DataFlowResource] = kwargs.pop("cls", None) request = build_get_request( @@ -482,8 +475,9 @@ def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -531,9 +525,7 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -549,8 +541,9 @@ def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -584,9 +577,7 @@ def list_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DataFlowListResponse] = kwargs.pop("cls", None) error_map = { @@ -640,8 +631,9 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # 
pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_datasets_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_datasets_operations.py index 7d2c990b5f3c..b3b171bcccdf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_datasets_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_datasets_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload import urllib.parse @@ -28,12 +28,8 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -47,7 +43,7 @@ def build_list_by_factory_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -70,7 +66,7 @@ def build_list_by_factory_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -93,7 +89,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -125,7 +121,7 @@ def build_create_or_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct 
parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -152,7 +148,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -183,7 +179,7 @@ def build_get_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -202,7 +198,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -233,7 +229,7 @@ def build_delete_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -281,9 +277,7 @@ def list_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.DatasetListResponse] = kwargs.pop("cls", 
None) error_map = { @@ -337,8 +331,9 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -465,16 +460,14 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.DatasetResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(dataset, (IO, bytes)): + if isinstance(dataset, (IOBase, bytes)): _content = dataset else: _json = self._serialize.body(dataset, "DatasetResource") @@ -496,8 +489,9 @@ def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -554,9 +548,7 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[Optional[_models.DatasetResource]] = kwargs.pop("cls", None) request = 
build_get_request( @@ -573,8 +565,9 @@ def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -624,9 +617,7 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -642,8 +633,9 @@ def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_exposure_control_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_exposure_control_operations.py index 9a16bb63b53b..5354c6e47c74 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_exposure_control_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_exposure_control_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload from azure.core.exceptions import ( @@ -26,12 +26,8 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -43,7 +39,7 @@ def build_get_feature_value_request(location_id: str, subscription_id: str, **kw _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -57,7 +53,7 @@ def build_get_feature_value_request(location_id: str, subscription_id: str, **kw "locationId": _SERIALIZER.url("location_id", location_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -76,7 +72,7 @@ def build_get_feature_value_by_factory_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", 
_params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -100,7 +96,7 @@ def build_get_feature_value_by_factory_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -119,7 +115,7 @@ def build_query_feature_values_by_factory_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -143,7 +139,7 @@ def build_query_feature_values_by_factory_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -248,16 +244,14 @@ def get_feature_value( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ExposureControlResponse] 
= kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(exposure_control_request, (IO, bytes)): + if isinstance(exposure_control_request, (IOBase, bytes)): _content = exposure_control_request else: _json = self._serialize.body(exposure_control_request, "ExposureControlRequest") @@ -276,8 +270,9 @@ def get_feature_value( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -387,16 +382,14 @@ def get_feature_value_by_factory( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ExposureControlResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(exposure_control_request, (IO, bytes)): + if isinstance(exposure_control_request, (IOBase, bytes)): _content = exposure_control_request else: _json = self._serialize.body(exposure_control_request, "ExposureControlRequest") @@ -416,8 +409,9 @@ def get_feature_value_by_factory( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -531,16 +525,14 @@ 
def query_feature_values_by_factory( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ExposureControlBatchResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(exposure_control_batch_request, (IO, bytes)): + if isinstance(exposure_control_batch_request, (IOBase, bytes)): _content = exposure_control_batch_request else: _json = self._serialize.body(exposure_control_batch_request, "ExposureControlBatchRequest") @@ -560,8 +552,9 @@ def query_feature_values_by_factory( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py index fcf952730729..e20ead3b6fe8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload import urllib.parse @@ -28,12 +28,8 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -45,7 +41,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -54,7 +50,7 @@ def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -69,7 +65,7 @@ def build_configure_factory_repo_request(location_id: str, subscription_id: str, _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + 
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -83,7 +79,7 @@ def build_configure_factory_repo_request(location_id: str, subscription_id: str, "locationId": _SERIALIZER.url("location_id", location_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -100,7 +96,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -115,7 +111,7 @@ def build_list_by_resource_group_request(resource_group_name: str, subscription_ ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -132,7 +128,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", 
"application/json") @@ -156,7 +152,7 @@ def build_create_or_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -177,7 +173,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -201,7 +197,7 @@ def build_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -225,7 +221,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -248,7 +244,7 @@ def build_get_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -267,7 +263,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -290,7 +286,7 @@ def build_delete_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -307,7 +303,7 @@ def build_get_git_hub_access_token_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -331,7 +327,7 @@ def build_get_git_hub_access_token_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -350,7 +346,7 @@ def build_get_data_plane_access_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") 
@@ -374,7 +370,7 @@ def build_get_data_plane_access_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -418,9 +414,7 @@ def list(self, **kwargs: Any) -> Iterable["_models.Factory"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.FactoryListResponse] = kwargs.pop("cls", None) error_map = { @@ -472,8 +466,9 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -560,16 +555,14 @@ def configure_factory_repo( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Factory] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(factory_repo_update, (IO, bytes)): + if isinstance(factory_repo_update, (IOBase, bytes)): _content = factory_repo_update else: _json = self._serialize.body(factory_repo_update, 
"FactoryRepoUpdate") @@ -588,8 +581,9 @@ def configure_factory_repo( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -623,9 +617,7 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.FactoryListResponse] = kwargs.pop("cls", None) error_map = { @@ -678,8 +670,9 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -796,16 +789,14 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Factory] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(factory, (IO, bytes)): + if isinstance(factory, (IOBase, bytes)): _content = factory else: 
_json = self._serialize.body(factory, "Factory") @@ -826,8 +817,9 @@ def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -937,16 +929,14 @@ def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.Factory] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(factory_update_parameters, (IO, bytes)): + if isinstance(factory_update_parameters, (IOBase, bytes)): _content = factory_update_parameters else: _json = self._serialize.body(factory_update_parameters, "FactoryUpdateParameters") @@ -966,8 +956,9 @@ def update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1017,9 +1008,7 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: 
ClsType[Optional[_models.Factory]] = kwargs.pop("cls", None) request = build_get_request( @@ -1035,8 +1024,9 @@ def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1084,9 +1074,7 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -1101,8 +1089,9 @@ def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1209,16 +1198,14 @@ def get_git_hub_access_token( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.GitHubAccessTokenResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if 
isinstance(git_hub_access_token_request, (IO, bytes)): + if isinstance(git_hub_access_token_request, (IOBase, bytes)): _content = git_hub_access_token_request else: _json = self._serialize.body(git_hub_access_token_request, "GitHubAccessTokenRequest") @@ -1238,8 +1225,9 @@ def get_git_hub_access_token( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1345,16 +1333,14 @@ def get_data_plane_access( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.AccessPolicyResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(policy, (IO, bytes)): + if isinstance(policy, (IOBase, bytes)): _content = policy else: _json = self._serialize.body(policy, "UserAccessPolicy") @@ -1374,8 +1360,9 @@ def get_data_plane_access( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_global_parameters_operations.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_global_parameters_operations.py index 0153454c068b..5a27f053683e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_global_parameters_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_global_parameters_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload import urllib.parse @@ -28,12 +28,8 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -47,7 +43,7 @@ def build_list_by_factory_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -70,7 +66,7 @@ def build_list_by_factory_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") @@ -87,7 +83,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -118,7 +114,7 @@ def build_get_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -135,7 +131,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -167,7 +163,7 @@ def build_create_or_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -186,7 +182,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = 
_headers.pop("Accept", "application/json") # Construct URL @@ -217,7 +213,7 @@ def build_delete_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -266,9 +262,7 @@ def list_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.GlobalParameterListResponse] = kwargs.pop("cls", None) error_map = { @@ -322,8 +316,9 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -367,9 +362,7 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.GlobalParameterResource] = kwargs.pop("cls", None) request = build_get_request( @@ -385,8 +378,9 @@ def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ 
-505,16 +499,14 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.GlobalParameterResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(default, (IO, bytes)): + if isinstance(default, (IOBase, bytes)): _content = default else: _json = self._serialize.body(default, "GlobalParameterResource") @@ -535,8 +527,9 @@ def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -584,9 +577,7 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -602,8 +593,9 @@ def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, 
stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_nodes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_nodes_operations.py index 4103c203d8b7..01d93b4e8ca4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_nodes_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_nodes_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload from azure.core.exceptions import ( @@ -26,12 +26,8 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -50,7 +46,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -84,7 +80,7 @@ def build_get_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -106,7 +102,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -140,7 +136,7 @@ def build_delete_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -162,7 +158,7 @@ def 
build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -197,7 +193,7 @@ def build_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -221,7 +217,7 @@ def build_get_ip_address_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -255,7 +251,7 @@ def build_get_ip_address_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -315,9 +311,7 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.SelfHostedIntegrationRuntimeNode] = kwargs.pop("cls", None) request = 
build_get_request( @@ -334,8 +328,9 @@ def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -385,9 +380,7 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -404,8 +397,9 @@ def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -533,16 +527,14 @@ def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.SelfHostedIntegrationRuntimeNode] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(update_integration_runtime_node_request, (IO, bytes)): + if 
isinstance(update_integration_runtime_node_request, (IOBase, bytes)): _content = update_integration_runtime_node_request else: _json = self._serialize.body(update_integration_runtime_node_request, "UpdateIntegrationRuntimeNodeRequest") @@ -564,8 +556,9 @@ def update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -615,9 +608,7 @@ def get_ip_address( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.IntegrationRuntimeNodeIpAddress] = kwargs.pop("cls", None) request = build_get_ip_address_request( @@ -634,8 +625,9 @@ def get_ip_address( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py index ba5ae41d0c63..1ed7fe0c9768 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtime_object_metadata_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload from azure.core.exceptions import ( @@ -28,12 +28,8 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -47,7 +43,7 @@ def build_refresh_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -78,7 +74,7 @@ def build_refresh_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -95,7 +91,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = 
kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -127,7 +123,7 @@ def build_get_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -173,9 +169,7 @@ def _refresh_initial( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[Optional[_models.SsisObjectMetadataStatusResponse]] = kwargs.pop("cls", None) request = build_refresh_request( @@ -191,8 +185,9 @@ def _refresh_initial( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -243,9 +238,7 @@ def begin_refresh( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.SsisObjectMetadataStatusResponse] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = 
kwargs.pop("polling_interval", self._config.polling_interval) @@ -392,16 +385,14 @@ def get( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.SsisObjectMetadataListResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(get_metadata_request, (IO, bytes)): + if isinstance(get_metadata_request, (IOBase, bytes)): _content = get_metadata_request else: if get_metadata_request is not None: @@ -425,8 +416,9 @@ def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py index bd1aa86d231f..dcc32ce24b5a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_integration_runtimes_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload import urllib.parse @@ -30,12 +30,8 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -49,7 +45,7 @@ def build_list_by_factory_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -72,7 +68,7 @@ def build_list_by_factory_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -95,7 +91,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = 
_headers.pop("Accept", "application/json") @@ -127,7 +123,7 @@ def build_create_or_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -154,7 +150,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -185,7 +181,7 @@ def build_get_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -204,7 +200,7 @@ def build_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -236,7 +232,7 @@ def build_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -255,7 +251,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", 
{}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -286,7 +282,7 @@ def build_delete_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -303,7 +299,7 @@ def build_get_status_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -334,7 +330,7 @@ def build_get_status_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -351,7 +347,7 @@ def build_list_outbound_network_dependencies_endpoints_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -382,7 +378,7 @@ def build_list_outbound_network_dependencies_endpoints_request( ), } - _url: str = _format_url_section(_url, 
**path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -399,7 +395,7 @@ def build_get_connection_info_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -430,7 +426,7 @@ def build_get_connection_info_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -447,7 +443,7 @@ def build_regenerate_auth_key_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -479,7 +475,7 @@ def build_regenerate_auth_key_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -498,7 +494,7 @@ def build_list_auth_keys_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - 
api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -529,7 +525,7 @@ def build_list_auth_keys_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -546,7 +542,7 @@ def build_start_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -577,7 +573,7 @@ def build_start_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -594,7 +590,7 @@ def build_stop_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -625,7 +621,7 @@ def build_stop_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") @@ -642,7 +638,7 @@ def build_sync_credentials_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -673,7 +669,7 @@ def build_sync_credentials_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -690,7 +686,7 @@ def build_get_monitoring_data_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -721,7 +717,7 @@ def build_get_monitoring_data_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -738,7 +734,7 @@ def build_upgrade_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") 
# Construct URL @@ -769,7 +765,7 @@ def build_upgrade_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -786,7 +782,7 @@ def build_remove_links_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -818,7 +814,7 @@ def build_remove_links_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -837,7 +833,7 @@ def build_create_linked_integration_runtime_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -869,7 +865,7 @@ def build_create_linked_integration_runtime_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", 
api_version, "str") @@ -920,9 +916,7 @@ def list_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.IntegrationRuntimeListResponse] = kwargs.pop("cls", None) error_map = { @@ -976,8 +970,9 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1107,16 +1102,14 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.IntegrationRuntimeResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(integration_runtime, (IO, bytes)): + if isinstance(integration_runtime, (IOBase, bytes)): _content = integration_runtime else: _json = self._serialize.body(integration_runtime, "IntegrationRuntimeResource") @@ -1138,8 +1131,9 @@ def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, 
**kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1196,9 +1190,7 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[Optional[_models.IntegrationRuntimeResource]] = kwargs.pop("cls", None) request = build_get_request( @@ -1215,8 +1207,9 @@ def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1341,16 +1334,14 @@ def update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.IntegrationRuntimeResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(update_integration_runtime_request, (IO, bytes)): + if isinstance(update_integration_runtime_request, (IOBase, bytes)): _content = update_integration_runtime_request else: _json = self._serialize.body(update_integration_runtime_request, "UpdateIntegrationRuntimeRequest") @@ -1371,8 +1362,9 @@ def update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False 
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1420,9 +1412,7 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -1438,8 +1428,9 @@ def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1483,9 +1474,7 @@ def get_status( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.IntegrationRuntimeStatusResponse] = kwargs.pop("cls", None) request = build_get_status_request( @@ -1501,8 +1490,9 @@ def get_status( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1552,9 +1542,7 @@ def 
list_outbound_network_dependencies_endpoints( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse] = kwargs.pop("cls", None) request = build_list_outbound_network_dependencies_endpoints_request( @@ -1570,8 +1558,9 @@ def list_outbound_network_dependencies_endpoints( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1622,9 +1611,7 @@ def get_connection_info( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.IntegrationRuntimeConnectionInfo] = kwargs.pop("cls", None) request = build_get_connection_info_request( @@ -1640,8 +1627,9 @@ def get_connection_info( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1765,16 +1753,14 @@ def regenerate_auth_key( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) 
or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.IntegrationRuntimeAuthKeys] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(regenerate_key_parameters, (IO, bytes)): + if isinstance(regenerate_key_parameters, (IOBase, bytes)): _content = regenerate_key_parameters else: _json = self._serialize.body(regenerate_key_parameters, "IntegrationRuntimeRegenerateKeyParameters") @@ -1795,8 +1781,9 @@ def regenerate_auth_key( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1844,9 +1831,7 @@ def list_auth_keys( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.IntegrationRuntimeAuthKeys] = kwargs.pop("cls", None) request = build_list_auth_keys_request( @@ -1862,8 +1847,9 @@ def list_auth_keys( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1897,9 +1883,7 @@ def 
_start_initial( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[Optional[_models.IntegrationRuntimeStatusResponse]] = kwargs.pop("cls", None) request = build_start_request( @@ -1915,8 +1899,9 @@ def _start_initial( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1967,9 +1952,7 @@ def begin_start( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.IntegrationRuntimeStatusResponse] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) @@ -2026,9 +2009,7 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_stop_request( @@ -2044,8 +2025,9 @@ def _stop_initial( # pylint: 
disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -2088,9 +2070,7 @@ def begin_stop( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) @@ -2162,9 +2142,7 @@ def sync_credentials( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_sync_credentials_request( @@ -2180,8 +2158,9 @@ def sync_credentials( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -2226,9 +2205,7 @@ def get_monitoring_data( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - 
api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.IntegrationRuntimeMonitoringData] = kwargs.pop("cls", None) request = build_get_monitoring_data_request( @@ -2244,8 +2221,9 @@ def get_monitoring_data( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -2293,9 +2271,7 @@ def upgrade( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_upgrade_request( @@ -2311,8 +2287,9 @@ def upgrade( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -2434,16 +2411,14 @@ def remove_links( # pylint: disable=inconsistent-return-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = 
kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[None] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(linked_integration_runtime_request, (IO, bytes)): + if isinstance(linked_integration_runtime_request, (IOBase, bytes)): _content = linked_integration_runtime_request else: _json = self._serialize.body(linked_integration_runtime_request, "LinkedIntegrationRuntimeRequest") @@ -2464,8 +2439,9 @@ def remove_links( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -2584,16 +2560,14 @@ def create_linked_integration_runtime( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.IntegrationRuntimeStatusResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(create_linked_integration_runtime_request, (IO, bytes)): + if isinstance(create_linked_integration_runtime_request, (IOBase, bytes)): _content = create_linked_integration_runtime_request else: _json = self._serialize.body( @@ -2616,8 +2590,9 @@ def create_linked_integration_runtime( request = 
_convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_linked_services_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_linked_services_operations.py index 3cbe58ea4e93..6b21244a5d39 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_linked_services_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_linked_services_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload import urllib.parse @@ -28,12 +28,8 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -47,7 +43,7 @@ def build_list_by_factory_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -70,7 +66,7 @@ def build_list_by_factory_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -93,7 +89,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -125,7 +121,7 @@ def build_create_or_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct 
parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -152,7 +148,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -183,7 +179,7 @@ def build_get_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -202,7 +198,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -233,7 +229,7 @@ def build_delete_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -282,9 +278,7 @@ def list_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.LinkedServiceListResponse] = 
kwargs.pop("cls", None) error_map = { @@ -338,8 +332,9 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -469,16 +464,14 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.LinkedServiceResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(linked_service, (IO, bytes)): + if isinstance(linked_service, (IOBase, bytes)): _content = linked_service else: _json = self._serialize.body(linked_service, "LinkedServiceResource") @@ -500,8 +493,9 @@ def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -558,9 +552,7 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: 
ClsType[Optional[_models.LinkedServiceResource]] = kwargs.pop("cls", None) request = build_get_request( @@ -577,8 +569,9 @@ def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -628,9 +621,7 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -646,8 +637,9 @@ def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_private_endpoints_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_private_endpoints_operations.py index 02387ca70f3e..c3d39b41122d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_private_endpoints_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_private_endpoints_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload import urllib.parse @@ -28,12 +28,8 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -47,7 +43,7 @@ def build_list_by_factory_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -78,7 +74,7 @@ def build_list_by_factory_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -102,7 +98,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = 
kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -142,7 +138,7 @@ def build_create_or_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -170,7 +166,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -209,7 +205,7 @@ def build_get_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -233,7 +229,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -272,7 +268,7 @@ def build_delete_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -324,9 +320,7 @@ def list_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ManagedPrivateEndpointListResponse] = kwargs.pop("cls", None) error_map = { @@ -381,8 +375,9 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -522,16 +517,14 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ManagedPrivateEndpointResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(managed_private_endpoint, (IO, bytes)): + if isinstance(managed_private_endpoint, (IOBase, bytes)): _content = managed_private_endpoint else: _json = self._serialize.body(managed_private_endpoint, "ManagedPrivateEndpointResource") @@ -554,8 +547,9 @@ def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ 
-615,9 +609,7 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ManagedPrivateEndpointResource] = kwargs.pop("cls", None) request = build_get_request( @@ -635,8 +627,9 @@ def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -691,9 +684,7 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -710,8 +701,9 @@ def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_virtual_networks_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_virtual_networks_operations.py index 7bc1159469b6..e6d09a11ca21 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_virtual_networks_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_managed_virtual_networks_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload import urllib.parse @@ -28,12 +28,8 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -47,7 +43,7 @@ def build_list_by_factory_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -70,7 +66,7 @@ def build_list_by_factory_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -93,7 +89,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", 
{}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -125,7 +121,7 @@ def build_create_or_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -152,7 +148,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -183,7 +179,7 @@ def build_get_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -235,9 +231,7 @@ def list_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ManagedVirtualNetworkListResponse] = kwargs.pop("cls", None) error_map = { @@ -291,8 +285,9 @@ def extract_data(pipeline_response): def 
get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -423,16 +418,14 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.ManagedVirtualNetworkResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(managed_virtual_network, (IO, bytes)): + if isinstance(managed_virtual_network, (IOBase, bytes)): _content = managed_virtual_network else: _json = self._serialize.body(managed_virtual_network, "ManagedVirtualNetworkResource") @@ -454,8 +447,9 @@ def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -512,9 +506,7 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.ManagedVirtualNetworkResource] = kwargs.pop("cls", None) request 
= build_get_request( @@ -531,8 +523,9 @@ def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_operations.py index 8af29aa5b695..b4a19caa42d1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_operations.py @@ -6,7 +6,6 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys from typing import Any, Callable, Dict, Iterable, Optional, TypeVar import urllib.parse @@ -30,10 +29,6 @@ from .._serialization import Serializer from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -45,7 +40,7 @@ def build_list_request(**kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", 
"application/json") # Construct URL @@ -91,9 +86,7 @@ def list(self, **kwargs: Any) -> Iterable["_models.Operation"]: _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.OperationListResponse] = kwargs.pop("cls", None) error_map = { @@ -144,8 +137,9 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipeline_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipeline_runs_operations.py index 73723a0b09c2..32f31dd80373 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipeline_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipeline_runs_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload from azure.core.exceptions import ( @@ -26,12 +26,8 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -45,7 +41,7 @@ def build_query_by_factory_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -69,7 +65,7 @@ def build_query_by_factory_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -88,7 +84,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -112,7 +108,7 @@ def build_get_request( "runId": _SERIALIZER.url("run_id", run_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: 
ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -135,7 +131,7 @@ def build_cancel_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -159,7 +155,7 @@ def build_cancel_request( "runId": _SERIALIZER.url("run_id", run_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters if is_recursive is not None: @@ -281,16 +277,14 @@ def query_by_factory( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.PipelineRunsQueryResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(filter_parameters, (IO, bytes)): + if isinstance(filter_parameters, (IOBase, bytes)): _content = filter_parameters else: _json = self._serialize.body(filter_parameters, "RunFilterParameters") @@ -310,8 +304,9 @@ def query_by_factory( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, 
stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -357,9 +352,7 @@ def get(self, resource_group_name: str, factory_name: str, run_id: str, **kwargs _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.PipelineRun] = kwargs.pop("cls", None) request = build_get_request( @@ -375,8 +368,9 @@ def get(self, resource_group_name: str, factory_name: str, run_id: str, **kwargs request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -432,9 +426,7 @@ def cancel( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_cancel_request( @@ -451,8 +443,9 @@ def cancel( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py index b52f507dd9cb..908515d24732 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py @@ -6,6 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from io import IOBase import sys from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload import urllib.parse @@ -28,16 +29,12 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request if sys.version_info >= (3, 9): from collections.abc import MutableMapping else: from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -52,7 +49,7 @@ def build_list_by_factory_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = 
_headers.pop("Accept", "application/json") # Construct URL @@ -75,7 +72,7 @@ def build_list_by_factory_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -98,7 +95,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -130,7 +127,7 @@ def build_create_or_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -157,7 +154,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -188,7 +185,7 @@ def build_get_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -207,7 +204,7 @@ def build_delete_request( _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -238,7 +235,7 @@ def build_delete_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -264,7 +261,7 @@ def build_create_run_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -296,7 +293,7 @@ def build_create_run_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -356,9 +353,7 @@ def list_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.PipelineListResponse] = kwargs.pop("cls", None) error_map = { @@ -412,8 +407,9 @@ def 
extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -540,16 +536,14 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.PipelineResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(pipeline, (IO, bytes)): + if isinstance(pipeline, (IOBase, bytes)): _content = pipeline else: _json = self._serialize.body(pipeline, "PipelineResource") @@ -571,8 +565,9 @@ def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -629,9 +624,7 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[Optional[_models.PipelineResource]] = kwargs.pop("cls", None) request = build_get_request( @@ -648,8 +641,9 @@ def get( 
request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -699,9 +693,7 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -717,8 +709,9 @@ def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -889,16 +882,14 @@ def create_run( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.CreateRunResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(parameters, (IO, bytes)): + if isinstance(parameters, (IOBase, bytes)): _content = parameters else: if parameters is not None: @@ -926,8 
+917,9 @@ def create_run( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py index 86407b53472d..5f0ae6b7412d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py @@ -6,7 +6,6 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys from typing import Any, Callable, Dict, Iterable, Optional, TypeVar import urllib.parse @@ -28,12 +27,8 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -47,7 +42,7 @@ def build_list_by_factory_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -70,7 +65,7 @@ def build_list_by_factory_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -120,9 +115,7 @@ def list_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.PrivateEndpointConnectionListResponse] = kwargs.pop("cls", None) error_map = { @@ -176,8 +169,9 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs 
+ request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py index e47ec5607375..258ae88e6706 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload from azure.core.exceptions import ( @@ -26,12 +26,8 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -51,7 +47,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -78,7 +74,7 @@ def build_create_or_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -105,7 +101,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -131,7 +127,7 @@ def build_get_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters 
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -154,7 +150,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -180,7 +176,7 @@ def build_delete_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -326,16 +322,14 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.PrivateEndpointConnectionResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(private_endpoint_wrapper, (IO, bytes)): + if isinstance(private_endpoint_wrapper, (IOBase, bytes)): _content = private_endpoint_wrapper else: _json = self._serialize.body(private_endpoint_wrapper, "PrivateLinkConnectionApprovalRequestResource") @@ -357,8 +351,9 @@ def create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, 
stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -415,9 +410,7 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.PrivateEndpointConnectionResource] = kwargs.pop("cls", None) request = build_get_request( @@ -434,8 +427,9 @@ def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -483,9 +477,7 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -501,8 +493,9 @@ def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py index c41c64876eaa..3241fc89e3a6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py @@ -6,7 +6,6 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys from typing import Any, Callable, Dict, Optional, TypeVar from azure.core.exceptions import ( @@ -26,12 +25,8 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -43,7 +38,7 @@ def build_get_request(resource_group_name: str, factory_name: str, subscription_ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -66,7 +61,7 @@ def build_get_request(resource_group_name: str, factory_name: str, subscription_ ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # 
Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -120,9 +115,7 @@ def get(self, resource_group_name: str, factory_name: str, **kwargs: Any) -> _mo _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.PrivateLinkResourcesWrapper] = kwargs.pop("cls", None) request = build_get_request( @@ -137,8 +130,9 @@ def get(self, resource_group_name: str, factory_name: str, **kwargs: Any) -> _mo request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_trigger_runs_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_trigger_runs_operations.py index 2610f456a071..b90e94cd0c05 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_trigger_runs_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_trigger_runs_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload from azure.core.exceptions import ( @@ -26,12 +26,8 @@ from .. 
import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -45,7 +41,7 @@ def build_rerun_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -77,7 +73,7 @@ def build_rerun_request( "runId": _SERIALIZER.url("run_id", run_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -94,7 +90,7 @@ def build_cancel_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -126,7 +122,7 @@ def build_cancel_request( "runId": _SERIALIZER.url("run_id", run_id, "str"), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters 
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -143,7 +139,7 @@ def build_query_by_factory_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -167,7 +163,7 @@ def build_query_by_factory_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -229,9 +225,7 @@ def rerun( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_rerun_request( @@ -248,8 +242,9 @@ def rerun( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -295,9 +290,7 @@ def cancel( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - 
api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_cancel_request( @@ -314,8 +307,9 @@ def cancel( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -421,16 +415,14 @@ def query_by_factory( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.TriggerRunsQueryResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(filter_parameters, (IO, bytes)): + if isinstance(filter_parameters, (IOBase, bytes)): _content = filter_parameters else: _json = self._serialize.body(filter_parameters, "RunFilterParameters") @@ -450,8 +442,9 @@ def query_by_factory( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py index a9d9c64898a7..4119898fcf62 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py @@ -6,7 +6,7 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import sys +from io import IOBase from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload import urllib.parse @@ -30,12 +30,8 @@ from .. import models as _models from .._serialization import Serializer -from .._vendor import _convert_request, _format_url_section +from .._vendor import _convert_request -if sys.version_info >= (3, 8): - from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports -else: - from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -49,7 +45,7 @@ def build_list_by_factory_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -72,7 +68,7 @@ def build_list_by_factory_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: 
ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -89,7 +85,7 @@ def build_query_by_factory_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -113,7 +109,7 @@ def build_query_by_factory_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -138,7 +134,7 @@ def build_create_or_update_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) accept = _headers.pop("Accept", "application/json") @@ -170,7 +166,7 @@ def build_create_or_update_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -197,7 +193,7 @@ def build_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = 
kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -228,7 +224,7 @@ def build_get_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -247,7 +243,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -278,7 +274,7 @@ def build_delete_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -295,7 +291,7 @@ def build_subscribe_to_events_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -326,7 +322,7 @@ def build_subscribe_to_events_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") @@ -343,7 +339,7 @@ def build_get_event_subscription_status_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -374,7 +370,7 @@ def build_get_event_subscription_status_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -391,7 +387,7 @@ def build_unsubscribe_from_events_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -422,7 +418,7 @@ def build_unsubscribe_from_events_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -439,7 +435,7 @@ def build_start_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = 
_headers.pop("Accept", "application/json") # Construct URL @@ -470,7 +466,7 @@ def build_start_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -487,7 +483,7 @@ def build_stop_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -518,7 +514,7 @@ def build_stop_request( ), } - _url: str = _format_url_section(_url, **path_format_arguments) # type: ignore + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") @@ -566,9 +562,7 @@ def list_by_factory( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.TriggerListResponse] = kwargs.pop("cls", None) error_map = { @@ -622,8 +616,9 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -729,16 +724,14 @@ def query_by_factory( _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.TriggerQueryResponse] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(filter_parameters, (IO, bytes)): + if isinstance(filter_parameters, (IOBase, bytes)): _content = filter_parameters else: _json = self._serialize.body(filter_parameters, "TriggerFilterParameters") @@ -758,8 +751,9 @@ def query_by_factory( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -890,16 +884,14 @@ def create_or_update( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.TriggerResource] = kwargs.pop("cls", None) content_type = content_type or "application/json" _json = None _content = None - if isinstance(trigger, (IO, bytes)): + if isinstance(trigger, (IOBase, bytes)): _content = trigger else: _json = self._serialize.body(trigger, "TriggerResource") @@ -921,8 +913,9 @@ def 
create_or_update( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -979,9 +972,7 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[Optional[_models.TriggerResource]] = kwargs.pop("cls", None) request = build_get_request( @@ -998,8 +989,9 @@ def get( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1049,9 +1041,7 @@ def delete( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_delete_request( @@ -1067,8 +1057,9 @@ def delete( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, 
**kwargs ) response = pipeline_response.http_response @@ -1098,9 +1089,7 @@ def _subscribe_to_events_initial( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[Optional[_models.TriggerSubscriptionOperationStatus]] = kwargs.pop("cls", None) request = build_subscribe_to_events_request( @@ -1116,8 +1105,9 @@ def _subscribe_to_events_initial( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1168,9 +1158,7 @@ def begin_subscribe_to_events( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.TriggerSubscriptionOperationStatus] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) @@ -1241,9 +1229,7 @@ def get_event_subscription_status( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: 
ClsType[_models.TriggerSubscriptionOperationStatus] = kwargs.pop("cls", None) request = build_get_event_subscription_status_request( @@ -1259,8 +1245,9 @@ def get_event_subscription_status( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1294,9 +1281,7 @@ def _unsubscribe_from_events_initial( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[Optional[_models.TriggerSubscriptionOperationStatus]] = kwargs.pop("cls", None) request = build_unsubscribe_from_events_request( @@ -1312,8 +1297,9 @@ def _unsubscribe_from_events_initial( request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1364,9 +1350,7 @@ def begin_unsubscribe_from_events( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[_models.TriggerSubscriptionOperationStatus] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = 
kwargs.pop("polling_interval", self._config.polling_interval) @@ -1423,9 +1407,7 @@ def _start_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) request = build_start_request( @@ -1441,8 +1423,9 @@ def _start_initial( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1485,9 +1468,7 @@ def begin_start( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) @@ -1542,9 +1523,7 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) 
request = build_stop_request( @@ -1560,8 +1539,9 @@ def _stop_initial( # pylint: disable=inconsistent-return-statements request = _convert_request(request) request.url = self._client.format_url(request.url) + _stream = False pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - request, stream=False, **kwargs + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1604,9 +1584,7 @@ def begin_stop( _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: Literal["2018-06-01"] = kwargs.pop( - "api_version", _params.pop("api-version", self._config.api_version) - ) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) cls: ClsType[None] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_create.py new file mode 100644 index 000000000000..fe3bd994602a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_create.py @@ -0,0 +1,441 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.datafactory import DataFactoryManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-datafactory +# USAGE + python change_data_capture_create.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DataFactoryManagementClient( + credential=DefaultAzureCredential(), + subscription_id="12345678-1234-1234-1234-12345678abc", + ) + + response = client.change_data_capture.create_or_update( + resource_group_name="exampleResourceGroup", + factory_name="exampleFactoryName", + change_data_capture_name="exampleChangeDataCapture", + change_data_capture={ + "properties": { + "Policy": {"mode": "Microbatch", "recurrence": {"frequency": "Minute", "interval": 15}}, + "SourceConnectionsInfo": [ + { + "Connection": { + "commonDslConnectorProperties": [ + {"name": "allowSchemaDrift", "value": True}, + {"name": "inferDriftedColumnTypes", "value": True}, + {"name": "format", "value": "delimited"}, + { + "name": "dateFormats", + "value": [ + "MM/dd/yyyy", + "dd/MM/yyyy", + "yyyy/MM/dd", + "MM-dd-yyyy", + "dd-MM-yyyy", + "yyyy-MM-dd", + "dd.MM.yyyy", + "MM.dd.yyyy", + "yyyy.MM.dd", + ], + }, + { + "name": "timestampFormats", + "value": [ + "yyyyMMddHHmm", + "yyyyMMdd HHmm", + "yyyyMMddHHmmss", + "yyyyMMdd HHmmss", + "dd-MM-yyyy HH:mm:ss", + "dd-MM-yyyy HH:mm", + "yyyy-M-d H:m:s", + "yyyy-MM-dd\\'T\\'HH:mm:ss\\'Z\\'", + "yyyy-M-d\\'T\\'H:m:s\\'Z\\'", + "yyyy-M-d\\'T\\'H:m:s", + "yyyy-MM-dd\\'T\\'HH:mm:ss", + "yyyy-MM-dd HH:mm:ss", + "yyyy-MM-dd HH:mm", + "yyyy.MM.dd 
HH:mm:ss", + "MM/dd/yyyy HH:mm:ss", + "M/d/yyyy H:m:s", + "yyyy/MM/dd HH:mm:ss", + "yyyy/M/d H:m:s", + "dd MMM yyyy HH:mm:ss", + "dd MMMM yyyy HH:mm:ss", + "d MMM yyyy H:m:s", + "d MMMM yyyy H:m:s", + "d-M-yyyy H:m:s", + "d-M-yyyy H:m", + "yyyy-M-d H:m", + "MM/dd/yyyy HH:mm", + "M/d/yyyy H:m", + "yyyy/MM/dd HH:mm", + "yyyy/M/d H:m", + "dd MMMM yyyy HH:mm", + "dd MMM yyyy HH:mm", + "d MMMM yyyy H:m", + "d MMM yyyy H:m", + "MM-dd-yyyy hh:mm:ss a", + "MM-dd-yyyy HH:mm:ss", + "MM/dd/yyyy hh:mm:ss a", + "yyyy.MM.dd hh:mm:ss a", + "MM/dd/yyyy", + "dd/MM/yyyy", + "yyyy/MM/dd", + "MM-dd-yyyy", + "dd-MM-yyyy", + "yyyy-MM-dd", + "dd.MM.yyyy", + "MM.dd.yyyy", + "yyyy.MM.dd", + ], + }, + {"name": "enableCdc", "value": True}, + {"name": "skipInitialLoad", "value": True}, + {"name": "columnNamesAsHeader", "value": True}, + {"name": "columnDelimiter", "value": ","}, + {"name": "escapeChar", "value": "\\\\"}, + {"name": "quoteChar", "value": '\\"'}, + ], + "isInlineDataset": True, + "linkedService": {"referenceName": "amjaAdls03", "type": "LinkedServiceReference"}, + "linkedServiceType": "AzureBlobFS", + "type": "linkedservicetype", + }, + "SourceEntities": [ + { + "name": "source/customer", + "properties": { + "dslConnectorProperties": [ + {"name": "container", "value": "source"}, + {"name": "fileSystem", "value": "source"}, + {"name": "folderPath", "value": "customer"}, + {"name": "allowSchemaDrift", "value": False}, + {"name": "inferDriftedColumnTypes", "value": False}, + ], + "schema": [ + {"dataType": "short", "name": "CustId"}, + {"dataType": "string", "name": "CustName"}, + {"dataType": "string", "name": "CustAddres"}, + {"dataType": "string", "name": "CustDepName"}, + {"dataType": "string", "name": "CustDepLoc"}, + ], + }, + }, + { + "name": "source/employee", + "properties": { + "dslConnectorProperties": [ + {"name": "container", "value": "source"}, + {"name": "fileSystem", "value": "source"}, + {"name": "folderPath", "value": "employee"}, + ], + "schema": [], + }, + }, + 
{ + "name": "lookup", + "properties": { + "dslConnectorProperties": [ + {"name": "container", "value": "lookup"}, + {"name": "fileSystem", "value": "lookup"}, + {"name": "allowSchemaDrift", "value": False}, + {"name": "inferDriftedColumnTypes", "value": False}, + ], + "schema": [ + {"dataType": "short", "name": "EmpId"}, + {"dataType": "string", "name": "EmpName"}, + {"dataType": "string", "name": "HomeAddress"}, + {"dataType": "string", "name": "OfficeAddress"}, + {"dataType": "integer", "name": "EmpPhoneNumber"}, + {"dataType": "string", "name": "DepName"}, + {"dataType": "string", "name": "DepLoc"}, + {"dataType": "double", "name": "DecimalCol"}, + ], + }, + }, + { + "name": "source/justSchema", + "properties": { + "dslConnectorProperties": [ + {"name": "container", "value": "source"}, + {"name": "fileSystem", "value": "source"}, + {"name": "folderPath", "value": "justSchema"}, + {"name": "allowSchemaDrift", "value": False}, + {"name": "inferDriftedColumnTypes", "value": False}, + ], + "schema": [ + {"dataType": "string", "name": "CustId"}, + {"dataType": "string", "name": "CustName"}, + {"dataType": "string", "name": "CustAddres"}, + {"dataType": "string", "name": "CustDepName"}, + {"dataType": "string", "name": "CustDepLoc"}, + ], + }, + }, + ], + } + ], + "TargetConnectionsInfo": [ + { + "Connection": { + "commonDslConnectorProperties": [ + {"name": "allowSchemaDrift", "value": True}, + {"name": "inferDriftedColumnTypes", "value": True}, + {"name": "format", "value": "table"}, + {"name": "store", "value": "sqlserver"}, + {"name": "databaseType", "value": "databaseType"}, + {"name": "database", "value": "database"}, + {"name": "deletable", "value": False}, + {"name": "insertable", "value": True}, + {"name": "updateable", "value": False}, + {"name": "upsertable", "value": False}, + {"name": "skipDuplicateMapInputs", "value": True}, + {"name": "skipDuplicateMapOutputs", "value": True}, + ], + "isInlineDataset": True, + "linkedService": {"referenceName": 
"amjaSql", "type": "LinkedServiceReference"}, + "linkedServiceType": "AzureSqlDatabase", + "type": "linkedservicetype", + }, + "DataMapperMappings": [ + { + "attributeMappingInfo": {"attributeMappings": []}, + "sourceConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "sourceEntityName": "source/customer", + "targetEntityName": "dbo.customer", + }, + { + "attributeMappingInfo": { + "attributeMappings": [ + { + "attributeReferences": [ + { + "entity": "lookup", + "entityConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "name": "EmpName", + } + ], + "expression": "upper(EmpName)", + "functionName": "upper", + "name": "Name", + "type": "Derived", + }, + { + "attributeReference": { + "entity": "lookup", + "entityConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "name": "EmpId", + }, + "functionName": "", + "name": "PersonID", + "type": "Direct", + }, + ] + }, + "sourceConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "sourceEntityName": "lookup", + "targetEntityName": "dbo.data_source_table", + }, + { + "attributeMappingInfo": {"attributeMappings": []}, + "sourceConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "sourceEntityName": "source/employee", + "targetEntityName": "dbo.employee", + }, + { + "attributeMappingInfo": { + "attributeMappings": [ + { + "attributeReferences": [ + { + "entity": "source/justSchema", + "entityConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "name": "CustAddres", + } + ], + "expression": "trim(CustAddres)", + "functionName": "trim", + "name": "CustAddres", + "type": "Derived", + }, + { + "attributeReference": { + "entity": "source/justSchema", + "entityConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "name": "CustDepLoc", + 
}, + "name": "CustDepLoc", + "type": "Direct", + }, + { + "attributeReferences": [ + { + "entity": "source/justSchema", + "entityConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "name": "CustName", + }, + { + "entity": "source/justSchema", + "entityConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "name": "CustDepName", + }, + ], + "expression": 'concat(CustName, " -> ", CustDepName)', + "functionName": "", + "name": "CustDepName", + "type": "Derived", + }, + { + "attributeReference": { + "entity": "source/justSchema", + "entityConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "name": "CustId", + }, + "functionName": "", + "name": "CustId", + "type": "Direct", + }, + { + "attributeReference": { + "entity": "source/justSchema", + "entityConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "name": "CustName", + }, + "name": "CustName", + "type": "Direct", + }, + ] + }, + "sourceConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "sourceEntityName": "source/justSchema", + "targetEntityName": "dbo.justSchema", + }, + ], + "Relationships": [], + "TargetEntities": [ + { + "name": "dbo.employee", + "properties": { + "dslConnectorProperties": [ + {"name": "schemaName", "value": "dbo"}, + {"name": "tableName", "value": "employee"}, + ], + "schema": [], + }, + }, + { + "name": "dbo.justSchema", + "properties": { + "dslConnectorProperties": [ + {"name": "schemaName", "value": "dbo"}, + {"name": "tableName", "value": "justSchema"}, + {"name": "allowSchemaDrift", "value": True}, + {"name": "inferDriftedColumnTypes", "value": True}, + ], + "schema": [], + }, + }, + { + "name": "dbo.customer", + "properties": { + "dslConnectorProperties": [ + {"name": "schemaName", "value": "dbo"}, + {"name": "tableName", "value": "customer"}, + {"name": "allowSchemaDrift", 
"value": False}, + {"name": "inferDriftedColumnTypes", "value": False}, + ], + "schema": [ + {"dataType": "integer", "name": "CustId"}, + {"dataType": "string", "name": "CustName"}, + {"dataType": "string", "name": "CustAddres"}, + {"dataType": "string", "name": "CustDeptName"}, + {"dataType": "string", "name": "CustEmail"}, + ], + }, + }, + { + "name": "dbo.data_source_table", + "properties": { + "dslConnectorProperties": [ + {"name": "schemaName", "value": "dbo"}, + {"name": "tableName", "value": "data_source_table"}, + {"name": "allowSchemaDrift", "value": False}, + {"name": "inferDriftedColumnTypes", "value": False}, + {"name": "defaultToUpsert", "value": False}, + ], + "schema": [ + {"dataType": "integer", "name": "PersonID"}, + {"dataType": "string", "name": "Name"}, + {"dataType": "timestamp", "name": "LastModifytime"}, + ], + }, + }, + ], + } + ], + "allowVNetOverride": False, + "description": "Sample demo change data capture to transfer data from delimited (csv) to Azure SQL Database with automapped and non-automapped mappings.", + } + }, + ) + print(response) + + +# x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Create.json +if __name__ == "__main__": + main() diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_delete.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_delete.py new file mode 100644 index 000000000000..033f962e958f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_delete.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.datafactory import DataFactoryManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-datafactory +# USAGE + python change_data_capture_delete.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DataFactoryManagementClient( + credential=DefaultAzureCredential(), + subscription_id="12345678-1234-1234-1234-12345678abc", + ) + + client.change_data_capture.delete( + resource_group_name="exampleResourceGroup", + factory_name="exampleFactoryName", + change_data_capture_name="exampleChangeDataCapture", + ) + + +# x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Delete.json +if __name__ == "__main__": + main() diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_get.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_get.py new file mode 100644 index 000000000000..7b8af1650b25 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_get.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.datafactory import DataFactoryManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-datafactory +# USAGE + python change_data_capture_get.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DataFactoryManagementClient( + credential=DefaultAzureCredential(), + subscription_id="12345678-1234-1234-1234-12345678abc", + ) + + response = client.change_data_capture.get( + resource_group_name="exampleResourceGroup", + factory_name="exampleFactoryName", + change_data_capture_name="exampleChangeDataCapture", + ) + print(response) + + +# x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Get.json +if __name__ == "__main__": + main() diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_list_by_factory.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_list_by_factory.py new file mode 100644 index 000000000000..aec672773151 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_list_by_factory.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.datafactory import DataFactoryManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-datafactory +# USAGE + python change_data_capture_list_by_factory.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DataFactoryManagementClient( + credential=DefaultAzureCredential(), + subscription_id="12345678-1234-1234-1234-12345678abc", + ) + + response = client.change_data_capture.list_by_factory( + resource_group_name="exampleResourceGroup", + factory_name="exampleFactoryName", + ) + for item in response: + print(item) + + +# x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_ListByFactory.json +if __name__ == "__main__": + main() diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_start.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_start.py new file mode 100644 index 000000000000..6f37e40fb386 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_start.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.datafactory import DataFactoryManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-datafactory +# USAGE + python change_data_capture_start.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DataFactoryManagementClient( + credential=DefaultAzureCredential(), + subscription_id="12345678-1234-1234-1234-12345678abc", + ) + + client.change_data_capture.start( + resource_group_name="exampleResourceGroup", + factory_name="exampleFactoryName", + change_data_capture_name="exampleChangeDataCapture", + ) + + +# x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Start.json +if __name__ == "__main__": + main() diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_status.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_status.py new file mode 100644 index 000000000000..dd5e52093f6c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_status.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.datafactory import DataFactoryManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-datafactory +# USAGE + python change_data_capture_status.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DataFactoryManagementClient( + credential=DefaultAzureCredential(), + subscription_id="12345678-1234-1234-1234-12345678abc", + ) + + response = client.change_data_capture.status( + resource_group_name="exampleResourceGroup", + factory_name="exampleFactoryName", + change_data_capture_name="exampleChangeDataCapture", + ) + print(response) + + +# x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Status.json +if __name__ == "__main__": + main() diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_stop.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_stop.py new file mode 100644 index 000000000000..142c11646737 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_stop.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.datafactory import DataFactoryManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-datafactory +# USAGE + python change_data_capture_stop.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DataFactoryManagementClient( + credential=DefaultAzureCredential(), + subscription_id="12345678-1234-1234-1234-12345678abc", + ) + + client.change_data_capture.stop( + resource_group_name="exampleResourceGroup", + factory_name="exampleFactoryName", + change_data_capture_name="exampleChangeDataCapture", + ) + + +# x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Stop.json +if __name__ == "__main__": + main() diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_update.py new file mode 100644 index 000000000000..1baa44702d8a --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_update.py @@ -0,0 +1,513 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.datafactory import DataFactoryManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-datafactory +# USAGE + python change_data_capture_update.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = DataFactoryManagementClient( + credential=DefaultAzureCredential(), + subscription_id="12345678-1234-1234-1234-12345678abc", + ) + + response = client.change_data_capture.create_or_update( + resource_group_name="exampleResourceGroup", + factory_name="exampleFactoryName", + change_data_capture_name="exampleChangeDataCapture", + change_data_capture={ + "properties": { + "Policy": {"mode": "Microbatch", "recurrence": {"frequency": "Minute", "interval": 15}}, + "SourceConnectionsInfo": [ + { + "Connection": { + "commonDslConnectorProperties": [ + {"name": "allowSchemaDrift", "value": True}, + {"name": "inferDriftedColumnTypes", "value": True}, + {"name": "format", "value": "delimited"}, + { + "name": "dateFormats", + "value": [ + "MM/dd/yyyy", + "dd/MM/yyyy", + "yyyy/MM/dd", + "MM-dd-yyyy", + "dd-MM-yyyy", + "yyyy-MM-dd", + "dd.MM.yyyy", + "MM.dd.yyyy", + "yyyy.MM.dd", + ], + }, + { + "name": "timestampFormats", + "value": [ + "yyyyMMddHHmm", + "yyyyMMdd HHmm", + "yyyyMMddHHmmss", + "yyyyMMdd HHmmss", + "dd-MM-yyyy HH:mm:ss", + "dd-MM-yyyy HH:mm", + "yyyy-M-d 
H:m:s", + "yyyy-MM-dd\\'T\\'HH:mm:ss\\'Z\\'", + "yyyy-M-d\\'T\\'H:m:s\\'Z\\'", + "yyyy-M-d\\'T\\'H:m:s", + "yyyy-MM-dd\\'T\\'HH:mm:ss", + "yyyy-MM-dd HH:mm:ss", + "yyyy-MM-dd HH:mm", + "yyyy.MM.dd HH:mm:ss", + "MM/dd/yyyy HH:mm:ss", + "M/d/yyyy H:m:s", + "yyyy/MM/dd HH:mm:ss", + "yyyy/M/d H:m:s", + "dd MMM yyyy HH:mm:ss", + "dd MMMM yyyy HH:mm:ss", + "d MMM yyyy H:m:s", + "d MMMM yyyy H:m:s", + "d-M-yyyy H:m:s", + "d-M-yyyy H:m", + "yyyy-M-d H:m", + "MM/dd/yyyy HH:mm", + "M/d/yyyy H:m", + "yyyy/MM/dd HH:mm", + "yyyy/M/d H:m", + "dd MMMM yyyy HH:mm", + "dd MMM yyyy HH:mm", + "d MMMM yyyy H:m", + "d MMM yyyy H:m", + "MM-dd-yyyy hh:mm:ss a", + "MM-dd-yyyy HH:mm:ss", + "MM/dd/yyyy hh:mm:ss a", + "yyyy.MM.dd hh:mm:ss a", + "MM/dd/yyyy", + "dd/MM/yyyy", + "yyyy/MM/dd", + "MM-dd-yyyy", + "dd-MM-yyyy", + "yyyy-MM-dd", + "dd.MM.yyyy", + "MM.dd.yyyy", + "yyyy.MM.dd", + ], + }, + {"name": "enableCdc", "value": True}, + {"name": "skipInitialLoad", "value": True}, + {"name": "columnNamesAsHeader", "value": True}, + {"name": "columnDelimiter", "value": ","}, + {"name": "escapeChar", "value": "\\\\"}, + {"name": "quoteChar", "value": '\\"'}, + ], + "isInlineDataset": True, + "linkedService": {"referenceName": "amjaAdls03", "type": "LinkedServiceReference"}, + "linkedServiceType": "AzureBlobFS", + "type": "linkedservicetype", + }, + "SourceEntities": [ + { + "name": "source/customer", + "properties": { + "dslConnectorProperties": [ + {"name": "container", "value": "source"}, + {"name": "fileSystem", "value": "source"}, + {"name": "folderPath", "value": "customer"}, + {"name": "allowSchemaDrift", "value": False}, + {"name": "inferDriftedColumnTypes", "value": False}, + ], + "schema": [ + {"dataType": "short", "name": "CustId"}, + {"dataType": "string", "name": "CustName"}, + {"dataType": "string", "name": "CustAddres"}, + {"dataType": "string", "name": "CustDepName"}, + {"dataType": "string", "name": "CustDepLoc"}, + ], + }, + }, + { + "name": "source/employee", + "properties": { + 
"dslConnectorProperties": [ + {"name": "container", "value": "source"}, + {"name": "fileSystem", "value": "source"}, + {"name": "folderPath", "value": "employee"}, + ], + "schema": [], + }, + }, + { + "name": "lookup", + "properties": { + "dslConnectorProperties": [ + {"name": "container", "value": "lookup"}, + {"name": "fileSystem", "value": "lookup"}, + {"name": "allowSchemaDrift", "value": False}, + {"name": "inferDriftedColumnTypes", "value": False}, + ], + "schema": [ + {"dataType": "short", "name": "EmpId"}, + {"dataType": "string", "name": "EmpName"}, + {"dataType": "string", "name": "HomeAddress"}, + {"dataType": "string", "name": "OfficeAddress"}, + {"dataType": "integer", "name": "EmpPhoneNumber"}, + {"dataType": "string", "name": "DepName"}, + {"dataType": "string", "name": "DepLoc"}, + {"dataType": "double", "name": "DecimalCol"}, + ], + }, + }, + { + "name": "source/justSchema", + "properties": { + "dslConnectorProperties": [ + {"name": "container", "value": "source"}, + {"name": "fileSystem", "value": "source"}, + {"name": "folderPath", "value": "justSchema"}, + {"name": "allowSchemaDrift", "value": False}, + {"name": "inferDriftedColumnTypes", "value": False}, + ], + "schema": [ + {"dataType": "string", "name": "CustId"}, + {"dataType": "string", "name": "CustName"}, + {"dataType": "string", "name": "CustAddres"}, + {"dataType": "string", "name": "CustDepName"}, + {"dataType": "string", "name": "CustDepLoc"}, + ], + }, + }, + ], + } + ], + "TargetConnectionsInfo": [ + { + "Connection": { + "commonDslConnectorProperties": [ + {"name": "allowSchemaDrift", "value": True}, + {"name": "inferDriftedColumnTypes", "value": True}, + {"name": "format", "value": "table"}, + {"name": "store", "value": "sqlserver"}, + {"name": "databaseType", "value": "databaseType"}, + {"name": "database", "value": "database"}, + {"name": "deletable", "value": False}, + {"name": "insertable", "value": True}, + {"name": "updateable", "value": False}, + {"name": "upsertable", 
"value": False}, + {"name": "skipDuplicateMapInputs", "value": True}, + {"name": "skipDuplicateMapOutputs", "value": True}, + ], + "isInlineDataset": True, + "linkedService": {"referenceName": "amjaSql", "type": "LinkedServiceReference"}, + "linkedServiceType": "AzureSqlDatabase", + "type": "linkedservicetype", + }, + "DataMapperMappings": [ + { + "attributeMappingInfo": { + "attributeMappings": [ + { + "attributeReferences": [ + { + "entity": "source/customer", + "entityConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "name": "CustAddres", + } + ], + "expression": "trim(CustAddres)", + "functionName": "trim", + "name": "CustAddres", + "type": "Derived", + }, + { + "attributeReference": { + "entity": "source/customer", + "entityConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "name": "CustDepName", + }, + "functionName": "", + "name": "CustDeptName", + "type": "Direct", + }, + { + "attributeReference": { + "entity": "source/customer", + "entityConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "name": "CustName", + }, + "functionName": "", + "name": "CustEmail", + "type": "Direct", + }, + { + "attributeReference": { + "entity": "source/customer", + "entityConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "name": "CustId", + }, + "functionName": "", + "name": "CustId", + "type": "Direct", + }, + { + "attributeReference": { + "entity": "source/customer", + "entityConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "name": "CustName", + }, + "functionName": "", + "name": "CustName", + "type": "Direct", + }, + ] + }, + "sourceConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "sourceEntityName": "source/customer", + "targetEntityName": "dbo.customer", + }, + { + "attributeMappingInfo": { + 
"attributeMappings": [ + { + "attributeReferences": [ + { + "entity": "lookup", + "entityConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "name": "EmpName", + } + ], + "expression": "upper(EmpName)", + "functionName": "upper", + "name": "Name", + "type": "Derived", + }, + { + "attributeReference": { + "entity": "lookup", + "entityConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "name": "EmpId", + }, + "functionName": "", + "name": "PersonID", + "type": "Direct", + }, + ] + }, + "sourceConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "sourceEntityName": "lookup", + "targetEntityName": "dbo.data_source_table", + }, + { + "attributeMappingInfo": {"attributeMappings": []}, + "sourceConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "sourceEntityName": "source/employee", + "targetEntityName": "dbo.employee", + }, + { + "attributeMappingInfo": { + "attributeMappings": [ + { + "attributeReferences": [ + { + "entity": "source/justSchema", + "entityConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "name": "CustAddres", + } + ], + "expression": "trim(CustAddres)", + "functionName": "trim", + "name": "CustAddres", + "type": "Derived", + }, + { + "attributeReference": { + "entity": "source/justSchema", + "entityConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "name": "CustDepLoc", + }, + "name": "CustDepLoc", + "type": "Direct", + }, + { + "attributeReferences": [ + { + "entity": "source/justSchema", + "entityConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "name": "CustName", + }, + { + "entity": "source/justSchema", + "entityConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "name": "CustDepName", + }, + ], + 
"expression": 'concat(CustName, " -> ", CustDepName)', + "functionName": "", + "name": "CustDepName", + "type": "Derived", + }, + { + "attributeReference": { + "entity": "source/justSchema", + "entityConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "name": "CustId", + }, + "functionName": "", + "name": "CustId", + "type": "Direct", + }, + { + "attributeReference": { + "entity": "source/justSchema", + "entityConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "name": "CustName", + }, + "name": "CustName", + "type": "Direct", + }, + ] + }, + "sourceConnectionReference": { + "connectionName": "amjaAdls03", + "type": "linkedservicetype", + }, + "sourceEntityName": "source/justSchema", + "targetEntityName": "dbo.justSchema", + }, + ], + "Relationships": [], + "TargetEntities": [ + { + "name": "dbo.employee", + "properties": { + "dslConnectorProperties": [ + {"name": "schemaName", "value": "dbo"}, + {"name": "tableName", "value": "employee"}, + ], + "schema": [], + }, + }, + { + "name": "dbo.justSchema", + "properties": { + "dslConnectorProperties": [ + {"name": "schemaName", "value": "dbo"}, + {"name": "tableName", "value": "justSchema"}, + {"name": "allowSchemaDrift", "value": True}, + {"name": "inferDriftedColumnTypes", "value": True}, + ], + "schema": [], + }, + }, + { + "name": "dbo.customer", + "properties": { + "dslConnectorProperties": [ + {"name": "schemaName", "value": "dbo"}, + {"name": "tableName", "value": "customer"}, + {"name": "allowSchemaDrift", "value": False}, + {"name": "inferDriftedColumnTypes", "value": False}, + ], + "schema": [ + {"dataType": "integer", "name": "CustId"}, + {"dataType": "string", "name": "CustName"}, + {"dataType": "string", "name": "CustAddres"}, + {"dataType": "string", "name": "CustDeptName"}, + {"dataType": "string", "name": "CustEmail"}, + ], + }, + }, + { + "name": "dbo.data_source_table", + "properties": { + "dslConnectorProperties": 
[ + {"name": "schemaName", "value": "dbo"}, + {"name": "tableName", "value": "data_source_table"}, + {"name": "allowSchemaDrift", "value": False}, + {"name": "inferDriftedColumnTypes", "value": False}, + {"name": "defaultToUpsert", "value": False}, + ], + "schema": [ + {"dataType": "integer", "name": "PersonID"}, + {"dataType": "string", "name": "Name"}, + {"dataType": "timestamp", "name": "LastModifytime"}, + ], + }, + }, + ], + } + ], + "allowVNetOverride": False, + "description": "Sample demo change data capture to transfer data from delimited (csv) to Azure SQL Database. Updating table mappings.", + "status": "Stopped", + } + }, + ) + print(response) + + +# x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Update.json +if __name__ == "__main__": + main() diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/credentials_delete.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/credentials_delete.py index 78148b9851bb..2f11b533cd77 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/credentials_delete.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/credentials_delete.py @@ -29,12 +29,11 @@ def main(): subscription_id="12345678-1234-1234-1234-12345678abc", ) - response = client.credential_operations.delete( + client.credential_operations.delete( resource_group_name="exampleResourceGroup", factory_name="exampleFactoryName", credential_name="exampleCredential", ) - print(response) # x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_Delete.json diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_delete.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_delete.py index 4cf9717e32f0..0c088894f52e 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_delete.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_delete.py @@ -29,12 +29,11 @@ def main(): subscription_id="12345678-1234-1234-1234-12345678abc", ) - response = client.data_flow_debug_session.delete( + client.data_flow_debug_session.delete( resource_group_name="exampleResourceGroup", factory_name="exampleFactoryName", request={"sessionId": "91fb57e0-8292-47be-89ff-c8f2d2bb2a7e"}, ) - print(response) # x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlowDebugSession_Delete.json diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flows_delete.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flows_delete.py index 367b1499bc88..ff38280b1ae1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flows_delete.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flows_delete.py @@ -29,12 +29,11 @@ def main(): subscription_id="12345678-1234-1234-1234-12345678abc", ) - response = client.data_flows.delete( + client.data_flows.delete( resource_group_name="exampleResourceGroup", factory_name="exampleFactoryName", data_flow_name="exampleDataFlow", ) - print(response) # x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Delete.json diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/datasets_delete.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/datasets_delete.py index 96005a1b97b7..1d9057087069 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/datasets_delete.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/datasets_delete.py @@ -29,12 +29,11 @@ def main(): subscription_id="12345678-1234-1234-1234-12345678abc", ) - response = client.datasets.delete( + 
client.datasets.delete( resource_group_name="exampleResourceGroup", factory_name="exampleFactoryName", dataset_name="exampleDataset", ) - print(response) # x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Delete.json diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/delete_private_endpoint_connection.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/delete_private_endpoint_connection.py index 7bb6b8610235..d4d7374994aa 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/delete_private_endpoint_connection.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/delete_private_endpoint_connection.py @@ -29,12 +29,11 @@ def main(): subscription_id="34adfa4f-cedf-4dc0-ba29-b6d1a69ab345", ) - response = client.private_endpoint_connection.delete( + client.private_endpoint_connection.delete( resource_group_name="exampleResourceGroup", factory_name="exampleFactoryName", private_endpoint_connection_name="connection", ) - print(response) # x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DeletePrivateEndpointConnection.json diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_delete.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_delete.py index 6a25a45b0056..8491113ab999 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_delete.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_delete.py @@ -29,11 +29,10 @@ def main(): subscription_id="12345678-1234-1234-1234-12345678abc", ) - response = client.factories.delete( + client.factories.delete( resource_group_name="exampleResourceGroup", factory_name="exampleFactoryName", ) - print(response) # x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_Delete.json diff --git 
a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/global_parameters_delete.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/global_parameters_delete.py index ce059dba7511..5c01f1bbf799 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/global_parameters_delete.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/global_parameters_delete.py @@ -29,12 +29,11 @@ def main(): subscription_id="12345678-1234-1234-1234-12345678abc", ) - response = client.global_parameters.delete( + client.global_parameters.delete( resource_group_name="exampleResourceGroup", factory_name="exampleFactoryName", global_parameter_name="default", ) - print(response) # x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GlobalParameters_Delete.json diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtime_nodes_delete.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtime_nodes_delete.py index 890275e51b18..8d14988db7ce 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtime_nodes_delete.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtime_nodes_delete.py @@ -29,13 +29,12 @@ def main(): subscription_id="12345678-1234-1234-1234-12345678abc", ) - response = client.integration_runtime_nodes.delete( + client.integration_runtime_nodes.delete( resource_group_name="exampleResourceGroup", factory_name="exampleFactoryName", integration_runtime_name="exampleIntegrationRuntime", node_name="Node_1", ) - print(response) # x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimeNodes_Delete.json diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_delete.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_delete.py index 
4667097eff43..5c97b05c0290 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_delete.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_delete.py @@ -29,12 +29,11 @@ def main(): subscription_id="12345678-1234-1234-1234-12345678abc", ) - response = client.integration_runtimes.delete( + client.integration_runtimes.delete( resource_group_name="exampleResourceGroup", factory_name="exampleFactoryName", integration_runtime_name="exampleIntegrationRuntime", ) - print(response) # x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Delete.json diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_remove_links.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_remove_links.py index a77b0dc65e70..3c4534541edc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_remove_links.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_remove_links.py @@ -29,13 +29,12 @@ def main(): subscription_id="12345678-1234-1234-1234-12345678abc", ) - response = client.integration_runtimes.remove_links( + client.integration_runtimes.remove_links( resource_group_name="exampleResourceGroup", factory_name="exampleFactoryName", integration_runtime_name="exampleIntegrationRuntime", linked_integration_runtime_request={"factoryName": "exampleFactoryName-linked"}, ) - print(response) # x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_RemoveLinks.json diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_stop.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_stop.py index 747e2d8854b7..7bd1fa2c295a 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_stop.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_stop.py @@ -29,12 +29,11 @@ def main(): subscription_id="12345678-1234-1234-1234-12345678abc", ) - response = client.integration_runtimes.begin_stop( + client.integration_runtimes.begin_stop( resource_group_name="exampleResourceGroup", factory_name="exampleFactoryName", integration_runtime_name="exampleManagedIntegrationRuntime", ).result() - print(response) # x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Stop.json diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_sync_credentials.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_sync_credentials.py index 14cb54320cad..2c1760ccf429 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_sync_credentials.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_sync_credentials.py @@ -29,12 +29,11 @@ def main(): subscription_id="12345678-1234-1234-1234-12345678abc", ) - response = client.integration_runtimes.sync_credentials( + client.integration_runtimes.sync_credentials( resource_group_name="exampleResourceGroup", factory_name="exampleFactoryName", integration_runtime_name="exampleIntegrationRuntime", ) - print(response) # x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_SyncCredentials.json diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_upgrade.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_upgrade.py index f0bee541b854..eb6c687df761 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_upgrade.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_upgrade.py @@ -29,12 +29,11 @@ def main(): subscription_id="12345678-1234-1234-1234-12345678abc", ) - response = client.integration_runtimes.upgrade( + client.integration_runtimes.upgrade( resource_group_name="exampleResourceGroup", factory_name="exampleFactoryName", integration_runtime_name="exampleIntegrationRuntime", ) - print(response) # x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Upgrade.json diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/linked_services_delete.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/linked_services_delete.py index ce3b0d95cad1..82be70cc1315 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/linked_services_delete.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/linked_services_delete.py @@ -29,12 +29,11 @@ def main(): subscription_id="12345678-1234-1234-1234-12345678abc", ) - response = client.linked_services.delete( + client.linked_services.delete( resource_group_name="exampleResourceGroup", factory_name="exampleFactoryName", linked_service_name="exampleLinkedService", ) - print(response) # x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Delete.json diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/managed_private_endpoints_delete.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/managed_private_endpoints_delete.py index 430542ebcbfe..eeafb5400b10 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/managed_private_endpoints_delete.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/managed_private_endpoints_delete.py @@ -29,13 +29,12 @@ def main(): subscription_id="12345678-1234-1234-1234-12345678abc", ) - response = client.managed_private_endpoints.delete( 
+ client.managed_private_endpoints.delete( resource_group_name="exampleResourceGroup", factory_name="exampleFactoryName", managed_virtual_network_name="exampleManagedVirtualNetworkName", managed_private_endpoint_name="exampleManagedPrivateEndpointName", ) - print(response) # x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ManagedPrivateEndpoints_Delete.json diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipeline_runs_cancel.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipeline_runs_cancel.py index 1255f2627e43..7b61ffcc634c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipeline_runs_cancel.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipeline_runs_cancel.py @@ -29,12 +29,11 @@ def main(): subscription_id="12345678-1234-1234-1234-12345678abc", ) - response = client.pipeline_runs.cancel( + client.pipeline_runs.cancel( resource_group_name="exampleResourceGroup", factory_name="exampleFactoryName", run_id="16ac5348-ff82-4f95-a80d-638c1d47b721", ) - print(response) # x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/PipelineRuns_Cancel.json diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipelines_delete.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipelines_delete.py index 9c32a2f7eeec..a3930e4cdc00 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipelines_delete.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipelines_delete.py @@ -29,12 +29,11 @@ def main(): subscription_id="12345678-1234-1234-1234-12345678abc", ) - response = client.pipelines.delete( + client.pipelines.delete( resource_group_name="exampleResourceGroup", factory_name="exampleFactoryName", pipeline_name="examplePipeline", ) - print(response) # x-ms-original-file: 
specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Delete.json diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/trigger_runs_cancel.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/trigger_runs_cancel.py index 53f5958809d7..e6397d3adbbe 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/trigger_runs_cancel.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/trigger_runs_cancel.py @@ -29,13 +29,12 @@ def main(): subscription_id="12345678-1234-1234-1234-12345678abc", ) - response = client.trigger_runs.cancel( + client.trigger_runs.cancel( resource_group_name="exampleResourceGroup", factory_name="exampleFactoryName", trigger_name="exampleTrigger", run_id="2f7fdb90-5df1-4b8e-ac2f-064cfa58202b", ) - print(response) # x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/TriggerRuns_Cancel.json diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/trigger_runs_rerun.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/trigger_runs_rerun.py index 17237b977b04..7028af759654 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/trigger_runs_rerun.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/trigger_runs_rerun.py @@ -29,13 +29,12 @@ def main(): subscription_id="12345678-1234-1234-1234-12345678abc", ) - response = client.trigger_runs.rerun( + client.trigger_runs.rerun( resource_group_name="exampleResourceGroup", factory_name="exampleFactoryName", trigger_name="exampleTrigger", run_id="2f7fdb90-5df1-4b8e-ac2f-064cfa58202b", ) - print(response) # x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/TriggerRuns_Rerun.json diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_delete.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_delete.py 
index 2f954469afe2..4ee618779ac7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_delete.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_delete.py @@ -29,12 +29,11 @@ def main(): subscription_id="12345678-1234-1234-1234-12345678abc", ) - response = client.triggers.delete( + client.triggers.delete( resource_group_name="exampleResourceGroup", factory_name="exampleFactoryName", trigger_name="exampleTrigger", ) - print(response) # x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Delete.json diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_start.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_start.py index 92f7bfd20e88..096366ecd39e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_start.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_start.py @@ -29,12 +29,11 @@ def main(): subscription_id="12345678-1234-1234-1234-12345678abc", ) - response = client.triggers.begin_start( + client.triggers.begin_start( resource_group_name="exampleResourceGroup", factory_name="exampleFactoryName", trigger_name="exampleTrigger", ).result() - print(response) # x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Start.json diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_stop.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_stop.py index bbadbb26a214..98e2d192636f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_stop.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_stop.py @@ -29,12 +29,11 @@ def main(): subscription_id="12345678-1234-1234-1234-12345678abc", ) - response = client.triggers.begin_stop( + client.triggers.begin_stop( resource_group_name="exampleResourceGroup", 
factory_name="exampleFactoryName", trigger_name="exampleTrigger", ).result() - print(response) # x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Stop.json