From 6a56b7655fa9914c1ff24f01ac68d48b46c825a1 Mon Sep 17 00:00:00 2001 From: SDKAuto Date: Fri, 27 May 2022 07:54:51 +0000 Subject: [PATCH] CodeGen from PR 19247 in Azure/azure-rest-api-specs Merge 59fdc57bc4684aed97ea547d42ba7ee9915dc7a6 into 40fdd84627937fdd8b3b06486497c8f2cfced350 --- .../azure-mgmt-datafactory/_meta.json | 4 +- .../_data_factory_management_client.py | 5 +- .../azure/mgmt/datafactory/_metadata.json | 3 +- .../azure/mgmt/datafactory/_version.py | 2 +- .../aio/_data_factory_management_client.py | 5 +- .../datafactory/aio/operations/__init__.py | 2 + .../_global_parameters_operations.py | 318 ++++++++++++ .../azure/mgmt/datafactory/models/__init__.py | 6 + .../_data_factory_management_client_enums.py | 1 + .../mgmt/datafactory/models/_models_py3.py | 218 +++++++- .../mgmt/datafactory/operations/__init__.py | 2 + .../_global_parameters_operations.py | 479 ++++++++++++++++++ 12 files changed, 1035 insertions(+), 10 deletions(-) create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_global_parameters_operations.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_global_parameters_operations.py diff --git a/sdk/datafactory/azure-mgmt-datafactory/_meta.json b/sdk/datafactory/azure-mgmt-datafactory/_meta.json index 8f31560cb36b0..0d2909a0297ec 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/_meta.json +++ b/sdk/datafactory/azure-mgmt-datafactory/_meta.json @@ -4,8 +4,8 @@ "@autorest/python@5.13.0", "@autorest/modelerfour@4.19.3" ], - "commit": "10429710d1b194e1b47087fe95a7e89d29e25f4f", + "commit": "0103b10be26486c6cbae83ae44641069f92da0fe", "repository_url": "https://github.com/Azure/azure-rest-api-specs", - "autorest_command": "autorest specification/datafactory/resource-manager/readme.md --multiapi --python --python-sdks-folder=/home/vsts/work/1/azure-sdk-for-python/sdk --python3-only --use=@autorest/python@5.13.0 --use=@autorest/modelerfour@4.19.3 --version=3.7.2", + "autorest_command": "autorest specification/datafactory/resource-manager/readme.md --multiapi --python --python-sdks-folder=/mnt/vss/_work/1/s/azure-sdk-for-python/sdk --python3-only --use=@autorest/python@5.13.0 --use=@autorest/modelerfour@4.19.3 --version=3.7.2", "readme": "specification/datafactory/resource-manager/readme.md" } \ No newline at end of file diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py index a0e21062adb00..bfd2881fd328d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py @@ -16,7 +16,7 @@ from . 
import models from ._configuration import DataFactoryManagementClientConfiguration -from .operations import ActivityRunsOperations, DataFlowDebugSessionOperations, DataFlowsOperations, DatasetsOperations, ExposureControlOperations, FactoriesOperations, IntegrationRuntimeNodesOperations, IntegrationRuntimeObjectMetadataOperations, IntegrationRuntimesOperations, LinkedServicesOperations, ManagedPrivateEndpointsOperations, ManagedVirtualNetworksOperations, Operations, PipelineRunsOperations, PipelinesOperations, PrivateEndPointConnectionsOperations, PrivateEndpointConnectionOperations, PrivateLinkResourcesOperations, TriggerRunsOperations, TriggersOperations +from .operations import ActivityRunsOperations, DataFlowDebugSessionOperations, DataFlowsOperations, DatasetsOperations, ExposureControlOperations, FactoriesOperations, GlobalParametersOperations, IntegrationRuntimeNodesOperations, IntegrationRuntimeObjectMetadataOperations, IntegrationRuntimesOperations, LinkedServicesOperations, ManagedPrivateEndpointsOperations, ManagedVirtualNetworksOperations, Operations, PipelineRunsOperations, PipelinesOperations, PrivateEndPointConnectionsOperations, PrivateEndpointConnectionOperations, PrivateLinkResourcesOperations, TriggerRunsOperations, TriggersOperations if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -75,6 +75,8 @@ class DataFactoryManagementClient: # pylint: disable=too-many-instance-attrib :ivar private_link_resources: PrivateLinkResourcesOperations operations :vartype private_link_resources: azure.mgmt.datafactory.operations.PrivateLinkResourcesOperations + :ivar global_parameters: GlobalParametersOperations operations + :vartype global_parameters: azure.mgmt.datafactory.operations.GlobalParametersOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The subscription identifier. 
@@ -122,6 +124,7 @@ def __init__( self.private_end_point_connections = PrivateEndPointConnectionsOperations(self._client, self._config, self._serialize, self._deserialize) self.private_endpoint_connection = PrivateEndpointConnectionOperations(self._client, self._config, self._serialize, self._deserialize) self.private_link_resources = PrivateLinkResourcesOperations(self._client, self._config, self._serialize, self._deserialize) + self.global_parameters = GlobalParametersOperations(self._client, self._config, self._serialize, self._deserialize) def _send_request( diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_metadata.json b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_metadata.json index 63d8c65d85011..dc3aad5c24275 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_metadata.json +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_metadata.json @@ -116,6 +116,7 @@ "managed_private_endpoints": "ManagedPrivateEndpointsOperations", "private_end_point_connections": "PrivateEndPointConnectionsOperations", "private_endpoint_connection": "PrivateEndpointConnectionOperations", - "private_link_resources": "PrivateLinkResourcesOperations" + "private_link_resources": "PrivateLinkResourcesOperations", + "global_parameters": "GlobalParametersOperations" } } \ No newline at end of file diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py index b7aa640531e8a..c47f66669f1bf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "2.5.0" +VERSION = "1.0.0" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py index 9edec66dc8d09..75e5e7010ff6d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/_data_factory_management_client.py @@ -16,7 +16,7 @@ from .. 
import models from ._configuration import DataFactoryManagementClientConfiguration -from .operations import ActivityRunsOperations, DataFlowDebugSessionOperations, DataFlowsOperations, DatasetsOperations, ExposureControlOperations, FactoriesOperations, IntegrationRuntimeNodesOperations, IntegrationRuntimeObjectMetadataOperations, IntegrationRuntimesOperations, LinkedServicesOperations, ManagedPrivateEndpointsOperations, ManagedVirtualNetworksOperations, Operations, PipelineRunsOperations, PipelinesOperations, PrivateEndPointConnectionsOperations, PrivateEndpointConnectionOperations, PrivateLinkResourcesOperations, TriggerRunsOperations, TriggersOperations +from .operations import ActivityRunsOperations, DataFlowDebugSessionOperations, DataFlowsOperations, DatasetsOperations, ExposureControlOperations, FactoriesOperations, GlobalParametersOperations, IntegrationRuntimeNodesOperations, IntegrationRuntimeObjectMetadataOperations, IntegrationRuntimesOperations, LinkedServicesOperations, ManagedPrivateEndpointsOperations, ManagedVirtualNetworksOperations, Operations, PipelineRunsOperations, PipelinesOperations, PrivateEndPointConnectionsOperations, PrivateEndpointConnectionOperations, PrivateLinkResourcesOperations, TriggerRunsOperations, TriggersOperations if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -76,6 +76,8 @@ class DataFactoryManagementClient: # pylint: disable=too-many-instance-attrib :ivar private_link_resources: PrivateLinkResourcesOperations operations :vartype private_link_resources: azure.mgmt.datafactory.aio.operations.PrivateLinkResourcesOperations + :ivar global_parameters: GlobalParametersOperations operations + :vartype global_parameters: azure.mgmt.datafactory.aio.operations.GlobalParametersOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The subscription identifier. 
@@ -123,6 +125,7 @@ def __init__( self.private_end_point_connections = PrivateEndPointConnectionsOperations(self._client, self._config, self._serialize, self._deserialize) self.private_endpoint_connection = PrivateEndpointConnectionOperations(self._client, self._config, self._serialize, self._deserialize) self.private_link_resources = PrivateLinkResourcesOperations(self._client, self._config, self._serialize, self._deserialize) + self.global_parameters = GlobalParametersOperations(self._client, self._config, self._serialize, self._deserialize) def _send_request( diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/__init__.py index c1da8c996a37a..f6161d20dd41e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/__init__.py @@ -26,6 +26,7 @@ from ._private_end_point_connections_operations import PrivateEndPointConnectionsOperations from ._private_endpoint_connection_operations import PrivateEndpointConnectionOperations from ._private_link_resources_operations import PrivateLinkResourcesOperations +from ._global_parameters_operations import GlobalParametersOperations __all__ = [ 'Operations', @@ -48,4 +49,5 @@ 'PrivateEndPointConnectionsOperations', 'PrivateEndpointConnectionOperations', 'PrivateLinkResourcesOperations', + 'GlobalParametersOperations', ] diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_global_parameters_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_global_parameters_operations.py new file mode 100644 index 0000000000000..fa29d76c8597f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_global_parameters_operations.py @@ -0,0 +1,318 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._global_parameters_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_by_factory_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class GlobalParametersOperations: + """GlobalParametersOperations async operations. 
+ + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.datafactory.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + @distributed_trace + def list_by_factory( + self, + resource_group_name: str, + factory_name: str, + **kwargs: Any + ) -> AsyncIterable["_models.GlobalParameterListResponse"]: + """Lists Global parameters. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either GlobalParameterListResponse or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.GlobalParameterListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + api_version = kwargs.pop('api_version', "2018-06-01") # type: str + + cls = kwargs.pop('cls', None) # type: ClsType["_models.GlobalParameterListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + api_version=api_version, + template_url=self.list_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + api_version=api_version, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("GlobalParameterListResponse", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters"} # type: ignore + + @distributed_trace_async 
+ async def get( + self, + resource_group_name: str, + factory_name: str, + global_parameter_name: str, + **kwargs: Any + ) -> "_models.GlobalParameterResource": + """Gets a Global parameter. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param global_parameter_name: The global parameter name. + :type global_parameter_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GlobalParameterResource, or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.GlobalParameterResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.GlobalParameterResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + api_version = kwargs.pop('api_version', "2018-06-01") # type: str + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + global_parameter_name=global_parameter_name, + api_version=api_version, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('GlobalParameterResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}"} # type: ignore + + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + global_parameter_name: str, + default: "_models.GlobalParameterResource", + **kwargs: Any + ) -> "_models.GlobalParameterResource": + """Creates or updates a Global parameter. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param global_parameter_name: The global parameter name. + :type global_parameter_name: str + :param default: Global parameter resource definition. 
+ :type default: ~azure.mgmt.datafactory.models.GlobalParameterResource + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GlobalParameterResource, or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.GlobalParameterResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.GlobalParameterResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + api_version = kwargs.pop('api_version', "2018-06-01") # type: str + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(default, 'GlobalParameterResource') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + global_parameter_name=global_parameter_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('GlobalParameterResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}"} # type: ignore + + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + factory_name: str, + global_parameter_name: str, + **kwargs: Any + ) -> None: + """Deletes a Global parameter. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param global_parameter_name: The global parameter name. 
+ :type global_parameter_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + api_version = kwargs.pop('api_version', "2018-06-01") # type: str + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + global_parameter_name=global_parameter_name, + api_version=api_version, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}"} # type: ignore + diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index db14d1b698bd9..7a4b8569b8857 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -290,6 +290,8 @@ from ._models_py3 import GitHubAccessTokenRequest from ._models_py3 import GitHubAccessTokenResponse from ._models_py3 import GitHubClientSecret +from ._models_py3 import GlobalParameterListResponse +from ._models_py3 import GlobalParameterResource from ._models_py3 import GlobalParameterSpecification from ._models_py3 import GoogleAdWordsLinkedService from ._models_py3 import GoogleAdWordsObjectDataset @@ -509,6 +511,7 @@ from ._models_py3 import PrivateLinkResource from ._models_py3 import PrivateLinkResourceProperties from ._models_py3 import PrivateLinkResourcesWrapper +from ._models_py3 import PurviewConfiguration from ._models_py3 import QueryDataFlowDebugSessionsResponse from ._models_py3 import QuickBooksLinkedService from ._models_py3 import QuickBooksObjectDataset @@ -1105,6 +1108,8 @@ 'GitHubAccessTokenRequest', 'GitHubAccessTokenResponse', 'GitHubClientSecret', + 'GlobalParameterListResponse', + 'GlobalParameterResource', 'GlobalParameterSpecification', 'GoogleAdWordsLinkedService', 'GoogleAdWordsObjectDataset', @@ -1324,6 +1329,7 @@ 'PrivateLinkResource', 'PrivateLinkResourceProperties', 'PrivateLinkResourcesWrapper', + 'PurviewConfiguration', 'QueryDataFlowDebugSessionsResponse', 'QuickBooksLinkedService', 'QuickBooksObjectDataset', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py index 554cca2051f0b..01a00efd87ed3 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py @@ -538,6 +538,7 @@ class RestServiceAuthenticationType(with_metaclass(CaseInsensitiveEnumMeta, str, BASIC = "Basic" AAD_SERVICE_PRINCIPAL = "AadServicePrincipal" MANAGED_SERVICE_IDENTITY = "ManagedServiceIdentity" + O_AUTH2_CLIENT_CREDENTIAL = "OAuth2ClientCredential" class RunQueryFilterOperand(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): """Parameter name to be used for filter. The allowed operands to query pipeline runs are diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index d34b7fc5a1476..f0298d1f82acd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -17986,6 +17986,8 @@ class DataFlowSink(Transformation): :vartype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference :ivar schema_linked_service: Schema linked service reference. :vartype schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar rejected_data_linked_service: Rejected data linked service reference. + :vartype rejected_data_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ _validation = { @@ -17999,6 +18001,7 @@ class DataFlowSink(Transformation): 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, 'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'}, 'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'}, + 'rejected_data_linked_service': {'key': 'rejectedDataLinkedService', 'type': 'LinkedServiceReference'}, } def __init__( @@ -18010,6 +18013,7 @@ def __init__( linked_service: Optional["LinkedServiceReference"] = None, flowlet: Optional["DataFlowReference"] = None, schema_linked_service: Optional["LinkedServiceReference"] = None, + rejected_data_linked_service: Optional["LinkedServiceReference"] = None, **kwargs ): """ @@ -18025,9 +18029,12 @@ def __init__( :paramtype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference :keyword schema_linked_service: Schema linked service reference. :paramtype schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword rejected_data_linked_service: Rejected data linked service reference. + :paramtype rejected_data_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ super(DataFlowSink, self).__init__(name=name, description=description, dataset=dataset, linked_service=linked_service, flowlet=flowlet, **kwargs) self.schema_linked_service = schema_linked_service + self.rejected_data_linked_service = rejected_data_linked_service class DataFlowSource(Transformation): @@ -22453,6 +22460,9 @@ class ExecuteDataFlowActivity(ExecutionActivity): the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). :vartype run_concurrently: any + :ivar source_staging_concurrency: Specify number of parallel staging for sources applicable to + the sink. Type: integer (or Expression with resultType integer). 
+ :vartype source_staging_concurrency: any """ _validation = { @@ -22477,6 +22487,7 @@ class ExecuteDataFlowActivity(ExecutionActivity): 'trace_level': {'key': 'typeProperties.traceLevel', 'type': 'object'}, 'continue_on_error': {'key': 'typeProperties.continueOnError', 'type': 'object'}, 'run_concurrently': {'key': 'typeProperties.runConcurrently', 'type': 'object'}, + 'source_staging_concurrency': {'key': 'typeProperties.sourceStagingConcurrency', 'type': 'object'}, } def __init__( @@ -22496,6 +22507,7 @@ def __init__( trace_level: Optional[Any] = None, continue_on_error: Optional[Any] = None, run_concurrently: Optional[Any] = None, + source_staging_concurrency: Optional[Any] = None, **kwargs ): """ @@ -22532,6 +22544,9 @@ def __init__( with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). :paramtype run_concurrently: any + :keyword source_staging_concurrency: Specify number of parallel staging for sources applicable + to the sink. Type: integer (or Expression with resultType integer). + :paramtype source_staging_concurrency: any """ super(ExecuteDataFlowActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'ExecuteDataFlow' # type: str @@ -22542,6 +22557,7 @@ def __init__( self.trace_level = trace_level self.continue_on_error = continue_on_error self.run_concurrently = run_concurrently + self.source_staging_concurrency = source_staging_concurrency class ExecuteDataFlowActivityTypeProperties(msrest.serialization.Model): @@ -22567,6 +22583,9 @@ class ExecuteDataFlowActivityTypeProperties(msrest.serialization.Model): the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). :vartype run_concurrently: any + :ivar source_staging_concurrency: Specify number of parallel staging for sources applicable to + the sink. Type: integer (or Expression with resultType integer). + :vartype source_staging_concurrency: any """ _validation = { @@ -22581,6 +22600,7 @@ class ExecuteDataFlowActivityTypeProperties(msrest.serialization.Model): 'trace_level': {'key': 'traceLevel', 'type': 'object'}, 'continue_on_error': {'key': 'continueOnError', 'type': 'object'}, 'run_concurrently': {'key': 'runConcurrently', 'type': 'object'}, + 'source_staging_concurrency': {'key': 'sourceStagingConcurrency', 'type': 'object'}, } def __init__( @@ -22593,6 +22613,7 @@ def __init__( trace_level: Optional[Any] = None, continue_on_error: Optional[Any] = None, run_concurrently: Optional[Any] = None, + source_staging_concurrency: Optional[Any] = None, **kwargs ): """ @@ -22614,6 +22635,9 @@ def __init__( with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). :paramtype run_concurrently: any + :keyword source_staging_concurrency: Specify number of parallel staging for sources applicable + to the sink. Type: integer (or Expression with resultType integer). 
+ :paramtype source_staging_concurrency: any """ super(ExecuteDataFlowActivityTypeProperties, self).__init__(**kwargs) self.data_flow = data_flow @@ -22623,6 +22647,7 @@ def __init__( self.trace_level = trace_level self.continue_on_error = continue_on_error self.run_concurrently = run_concurrently + self.source_staging_concurrency = source_staging_concurrency class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): @@ -22815,6 +22840,9 @@ class ExecutePowerQueryActivityTypeProperties(ExecuteDataFlowActivityTypePropert the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). :vartype run_concurrently: any + :ivar source_staging_concurrency: Specify number of parallel staging for sources applicable to + the sink. Type: integer (or Expression with resultType integer). + :vartype source_staging_concurrency: any :ivar sinks: (Deprecated. Please use Queries). List of Power Query activity sinks mapped to a queryName. :vartype sinks: dict[str, ~azure.mgmt.datafactory.models.PowerQuerySink] @@ -22834,6 +22862,7 @@ class ExecutePowerQueryActivityTypeProperties(ExecuteDataFlowActivityTypePropert 'trace_level': {'key': 'traceLevel', 'type': 'object'}, 'continue_on_error': {'key': 'continueOnError', 'type': 'object'}, 'run_concurrently': {'key': 'runConcurrently', 'type': 'object'}, + 'source_staging_concurrency': {'key': 'sourceStagingConcurrency', 'type': 'object'}, 'sinks': {'key': 'sinks', 'type': '{PowerQuerySink}'}, 'queries': {'key': 'queries', 'type': '[PowerQuerySinkMapping]'}, } @@ -22848,6 +22877,7 @@ def __init__( trace_level: Optional[Any] = None, continue_on_error: Optional[Any] = None, run_concurrently: Optional[Any] = None, + source_staging_concurrency: Optional[Any] = None, sinks: Optional[Dict[str, "PowerQuerySink"]] = None, queries: Optional[List["PowerQuerySinkMapping"]] = None, **kwargs @@ -22871,13 +22901,16 @@ def __init__( with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). :paramtype run_concurrently: any + :keyword source_staging_concurrency: Specify number of parallel staging for sources applicable + to the sink. Type: integer (or Expression with resultType integer). + :paramtype source_staging_concurrency: any :keyword sinks: (Deprecated. Please use Queries). List of Power Query activity sinks mapped to a queryName. :paramtype sinks: dict[str, ~azure.mgmt.datafactory.models.PowerQuerySink] :keyword queries: List of mapping for Power Query mashup query to sink dataset(s). :paramtype queries: list[~azure.mgmt.datafactory.models.PowerQuerySinkMapping] """ - super(ExecutePowerQueryActivityTypeProperties, self).__init__(data_flow=data_flow, staging=staging, integration_runtime=integration_runtime, compute=compute, trace_level=trace_level, continue_on_error=continue_on_error, run_concurrently=run_concurrently, **kwargs) + super(ExecutePowerQueryActivityTypeProperties, self).__init__(data_flow=data_flow, staging=staging, integration_runtime=integration_runtime, compute=compute, trace_level=trace_level, continue_on_error=continue_on_error, run_concurrently=run_concurrently, source_staging_concurrency=source_staging_concurrency, **kwargs) self.sinks = sinks self.queries = queries @@ -23093,6 +23126,9 @@ class ExecuteWranglingDataflowActivity(Activity): the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). 
:vartype run_concurrently: any + :ivar source_staging_concurrency: Specify number of parallel staging for sources applicable to + the sink. Type: integer (or Expression with resultType integer). + :vartype source_staging_concurrency: any :ivar sinks: (Deprecated. Please use Queries). List of Power Query activity sinks mapped to a queryName. :vartype sinks: dict[str, ~azure.mgmt.datafactory.models.PowerQuerySink] @@ -23121,6 +23157,7 @@ class ExecuteWranglingDataflowActivity(Activity): 'trace_level': {'key': 'typeProperties.traceLevel', 'type': 'object'}, 'continue_on_error': {'key': 'typeProperties.continueOnError', 'type': 'object'}, 'run_concurrently': {'key': 'typeProperties.runConcurrently', 'type': 'object'}, + 'source_staging_concurrency': {'key': 'typeProperties.sourceStagingConcurrency', 'type': 'object'}, 'sinks': {'key': 'typeProperties.sinks', 'type': '{PowerQuerySink}'}, 'queries': {'key': 'typeProperties.queries', 'type': '[PowerQuerySinkMapping]'}, } @@ -23141,6 +23178,7 @@ def __init__( trace_level: Optional[Any] = None, continue_on_error: Optional[Any] = None, run_concurrently: Optional[Any] = None, + source_staging_concurrency: Optional[Any] = None, sinks: Optional[Dict[str, "PowerQuerySink"]] = None, queries: Optional[List["PowerQuerySinkMapping"]] = None, **kwargs @@ -23177,6 +23215,9 @@ def __init__( with the same save order to be processed concurrently. Type: boolean (or Expression with resultType boolean). :paramtype run_concurrently: any + :keyword source_staging_concurrency: Specify number of parallel staging for sources applicable + to the sink. Type: integer (or Expression with resultType integer). + :paramtype source_staging_concurrency: any :keyword sinks: (Deprecated. Please use Queries). List of Power Query activity sinks mapped to a queryName. :paramtype sinks: dict[str, ~azure.mgmt.datafactory.models.PowerQuerySink] @@ -23193,6 +23234,7 @@ def __init__( self.trace_level = trace_level self.continue_on_error = continue_on_error self.run_concurrently = run_concurrently + self.source_staging_concurrency = source_staging_concurrency self.sinks = sinks self.queries = queries @@ -23450,6 +23492,8 @@ class Factory(Resource): :vartype create_time: ~datetime.datetime :ivar version: Version of the factory. :vartype version: str + :ivar purview_configuration: Purview information of the factory. + :vartype purview_configuration: ~azure.mgmt.datafactory.models.PurviewConfiguration :ivar repo_configuration: Git repo information of the factory. :vartype repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration :ivar global_parameters: List of parameters for factory. 
@@ -23484,6 +23528,7 @@ class Factory(Resource): 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'}, 'version': {'key': 'properties.version', 'type': 'str'}, + 'purview_configuration': {'key': 'properties.purviewConfiguration', 'type': 'PurviewConfiguration'}, 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, 'global_parameters': {'key': 'properties.globalParameters', 'type': '{GlobalParameterSpecification}'}, 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionConfiguration'}, @@ -23497,6 +23542,7 @@ def __init__( tags: Optional[Dict[str, str]] = None, additional_properties: Optional[Dict[str, Any]] = None, identity: Optional["FactoryIdentity"] = None, + purview_configuration: Optional["PurviewConfiguration"] = None, repo_configuration: Optional["FactoryRepoConfiguration"] = None, global_parameters: Optional[Dict[str, "GlobalParameterSpecification"]] = None, encryption: Optional["EncryptionConfiguration"] = None, @@ -23513,6 +23559,8 @@ def __init__( :paramtype additional_properties: dict[str, any] :keyword identity: Managed service identity of the factory. :paramtype identity: ~azure.mgmt.datafactory.models.FactoryIdentity + :keyword purview_configuration: Purview information of the factory. + :paramtype purview_configuration: ~azure.mgmt.datafactory.models.PurviewConfiguration :keyword repo_configuration: Git repo information of the factory. :paramtype repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration :keyword global_parameters: List of parameters for factory. @@ -23530,6 +23578,7 @@ def __init__( self.provisioning_state = None self.create_time = None self.version = None + self.purview_configuration = purview_configuration self.repo_configuration = repo_configuration self.global_parameters = global_parameters self.encryption = encryption @@ -25531,6 +25580,93 @@ def __init__( self.byoa_secret_name = byoa_secret_name +class GlobalParameterListResponse(msrest.serialization.Model): + """A list of Global parameters. + + All required parameters must be populated in order to send to Azure. + + :ivar value: Required. List of global parameters. + :vartype value: list[~azure.mgmt.datafactory.models.GlobalParameterResource] + :ivar next_link: The link to the next page of results, if any remaining results exist. + :vartype next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[GlobalParameterResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: List["GlobalParameterResource"], + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword value: Required. List of global parameters. + :paramtype value: list[~azure.mgmt.datafactory.models.GlobalParameterResource] + :keyword next_link: The link to the next page of results, if any remaining results exist. + :paramtype next_link: str + """ + super(GlobalParameterListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class GlobalParameterResource(SubResource): + """Global parameters resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. 
+ :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :ivar properties: Required. Properties of the global parameter. + :vartype properties: dict[str, ~azure.mgmt.datafactory.models.GlobalParameterSpecification] + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{GlobalParameterSpecification}'}, + } + + def __init__( + self, + *, + properties: Dict[str, "GlobalParameterSpecification"], + **kwargs + ): + """ + :keyword properties: Required. Properties of the global parameter. + :paramtype properties: dict[str, ~azure.mgmt.datafactory.models.GlobalParameterSpecification] + """ + super(GlobalParameterResource, self).__init__(**kwargs) + self.properties = properties + + class GlobalParameterSpecification(msrest.serialization.Model): """Definition of a single parameter for an entity. @@ -42452,6 +42588,8 @@ class PowerQuerySink(DataFlowSink): :vartype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference :ivar schema_linked_service: Schema linked service reference. :vartype schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar rejected_data_linked_service: Rejected data linked service reference. + :vartype rejected_data_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :ivar script: sink script. :vartype script: str """ @@ -42467,6 +42605,7 @@ class PowerQuerySink(DataFlowSink): 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, 'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'}, 'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'}, + 'rejected_data_linked_service': {'key': 'rejectedDataLinkedService', 'type': 'LinkedServiceReference'}, 'script': {'key': 'script', 'type': 'str'}, } @@ -42479,6 +42618,7 @@ def __init__( linked_service: Optional["LinkedServiceReference"] = None, flowlet: Optional["DataFlowReference"] = None, schema_linked_service: Optional["LinkedServiceReference"] = None, + rejected_data_linked_service: Optional["LinkedServiceReference"] = None, script: Optional[str] = None, **kwargs ): @@ -42495,10 +42635,12 @@ def __init__( :paramtype flowlet: ~azure.mgmt.datafactory.models.DataFlowReference :keyword schema_linked_service: Schema linked service reference. :paramtype schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword rejected_data_linked_service: Rejected data linked service reference. + :paramtype rejected_data_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :keyword script: sink script. 
:paramtype script: str """ - super(PowerQuerySink, self).__init__(name=name, description=description, dataset=dataset, linked_service=linked_service, flowlet=flowlet, schema_linked_service=schema_linked_service, **kwargs) + super(PowerQuerySink, self).__init__(name=name, description=description, dataset=dataset, linked_service=linked_service, flowlet=flowlet, schema_linked_service=schema_linked_service, rejected_data_linked_service=rejected_data_linked_service, **kwargs) self.script = script @@ -43332,6 +43474,31 @@ def __init__( self.value = value +class PurviewConfiguration(msrest.serialization.Model): + """Purview configuration. + + :ivar purview_resource_id: Purview resource id. + :vartype purview_resource_id: str + """ + + _attribute_map = { + 'purview_resource_id': {'key': 'purviewResourceId', 'type': 'str'}, + } + + def __init__( + self, + *, + purview_resource_id: Optional[str] = None, + **kwargs + ): + """ + :keyword purview_resource_id: Purview resource id. + :paramtype purview_resource_id: str + """ + super(PurviewConfiguration, self).__init__(**kwargs) + self.purview_resource_id = purview_resource_id + + class QueryDataFlowDebugSessionsResponse(msrest.serialization.Model): """A list of active debug sessions. @@ -44760,7 +44927,7 @@ class RestServiceLinkedService(LinkedService): :vartype enable_server_certificate_validation: any :ivar authentication_type: Required. Type of authentication used to connect to the REST service. Possible values include: "Anonymous", "Basic", "AadServicePrincipal", - "ManagedServiceIdentity". + "ManagedServiceIdentity", "OAuth2ClientCredential". :vartype authentication_type: str or ~azure.mgmt.datafactory.models.RestServiceAuthenticationType :ivar user_name: The user name used in Basic authentication type. @@ -44791,6 +44958,20 @@ class RestServiceLinkedService(LinkedService): :vartype encrypted_credential: any :ivar credential: The credential reference containing authentication information. :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference + :ivar client_id: The client ID associated with your application. Type: string (or Expression + with resultType string). + :vartype client_id: any + :ivar client_secret: The client secret associated with your application. + :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar token_endpoint: The token endpoint of the authorization server to acquire access token. + Type: string (or Expression with resultType string). + :vartype token_endpoint: any + :ivar resource: The target service or resource to which the access will be requested. Type: + string (or Expression with resultType string). + :vartype resource: any + :ivar scope: The scope of the access required. It describes what kind of access will be + requested. Type: string (or Expression with resultType string). 
+ :vartype scope: any """ _validation = { @@ -44819,6 +45000,11 @@ class RestServiceLinkedService(LinkedService): 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'token_endpoint': {'key': 'typeProperties.tokenEndpoint', 'type': 'object'}, + 'resource': {'key': 'typeProperties.resource', 'type': 'object'}, + 'scope': {'key': 'typeProperties.scope', 'type': 'object'}, } def __init__( @@ -44842,6 +45028,11 @@ def __init__( aad_resource_id: Optional[Any] = None, encrypted_credential: Optional[Any] = None, credential: Optional["CredentialReference"] = None, + client_id: Optional[Any] = None, + client_secret: Optional["SecretBase"] = None, + token_endpoint: Optional[Any] = None, + resource: Optional[Any] = None, + scope: Optional[Any] = None, **kwargs ): """ @@ -44864,7 +45055,7 @@ def __init__( :paramtype enable_server_certificate_validation: any :keyword authentication_type: Required. Type of authentication used to connect to the REST service. Possible values include: "Anonymous", "Basic", "AadServicePrincipal", - "ManagedServiceIdentity". + "ManagedServiceIdentity", "OAuth2ClientCredential". :paramtype authentication_type: str or ~azure.mgmt.datafactory.models.RestServiceAuthenticationType :keyword user_name: The user name used in Basic authentication type. @@ -44895,6 +45086,20 @@ def __init__( :paramtype encrypted_credential: any :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference + :keyword client_id: The client ID associated with your application. Type: string (or Expression + with resultType string). + :paramtype client_id: any + :keyword client_secret: The client secret associated with your application. + :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword token_endpoint: The token endpoint of the authorization server to acquire access + token. Type: string (or Expression with resultType string). + :paramtype token_endpoint: any + :keyword resource: The target service or resource to which the access will be requested. Type: + string (or Expression with resultType string). + :paramtype resource: any + :keyword scope: The scope of the access required. It describes what kind of access will be + requested. Type: string (or Expression with resultType string). 
+ :paramtype scope: any """ super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'RestService' # type: str @@ -44911,6 +45116,11 @@ def __init__( self.aad_resource_id = aad_resource_id self.encrypted_credential = encrypted_credential self.credential = credential + self.client_id = client_id + self.client_secret = client_secret + self.token_endpoint = token_endpoint + self.resource = resource + self.scope = scope class RestSink(CopySink): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py index c1da8c996a37a..f6161d20dd41e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py @@ -26,6 +26,7 @@ from ._private_end_point_connections_operations import PrivateEndPointConnectionsOperations from ._private_endpoint_connection_operations import PrivateEndpointConnectionOperations from ._private_link_resources_operations import PrivateLinkResourcesOperations +from ._global_parameters_operations import GlobalParametersOperations __all__ = [ 'Operations', @@ -48,4 +49,5 @@ 'PrivateEndPointConnectionsOperations', 'PrivateEndpointConnectionOperations', 'PrivateLinkResourcesOperations', + 'GlobalParametersOperations', ] diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_global_parameters_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_global_parameters_operations.py new file mode 100644 index 0000000000000..950e0b647f890 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_global_parameters_operations.py @@ -0,0 +1,479 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +JSONType = Any +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_by_factory_request( + subscription_id: str, + resource_group_name: str, + factory_name: str, + **kwargs: Any +) -> HttpRequest: + api_version = kwargs.pop('api_version', "2018-06-01") # type: str + + accept = "application/json" + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + factory_name: str, + global_parameter_name: str, + **kwargs: Any +) -> HttpRequest: + api_version = kwargs.pop('api_version', "2018-06-01") # type: str + + accept = "application/json" + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "globalParameterName": _SERIALIZER.url("global_parameter_name", global_parameter_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id: str, + resource_group_name: str, + factory_name: str, + global_parameter_name: str, + *, + json: JSONType = None, + content: Any = 
None, + **kwargs: Any +) -> HttpRequest: + api_version = kwargs.pop('api_version', "2018-06-01") # type: str + content_type = kwargs.pop('content_type', None) # type: Optional[str] + + accept = "application/json" + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "globalParameterName": _SERIALIZER.url("global_parameter_name", global_parameter_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + if content_type is not None: + _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_query_parameters, + headers=_header_parameters, + json=json, + content=content, + **kwargs + ) + + +def build_delete_request( + subscription_id: str, + resource_group_name: str, + factory_name: str, + global_parameter_name: str, + **kwargs: Any +) -> HttpRequest: + api_version = kwargs.pop('api_version', "2018-06-01") # type: str + + accept = "application/json" + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + "factoryName": _SERIALIZER.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + "globalParameterName": _SERIALIZER.url("global_parameter_name", global_parameter_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] + _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] + _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_query_parameters, + headers=_header_parameters, + **kwargs + ) + +class GlobalParametersOperations(object): + """GlobalParametersOperations operations. + + You should not instantiate this class directly. 
Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.datafactory.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + @distributed_trace + def list_by_factory( + self, + resource_group_name: str, + factory_name: str, + **kwargs: Any + ) -> Iterable["_models.GlobalParameterListResponse"]: + """Lists Global parameters. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either GlobalParameterListResponse or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.GlobalParameterListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + api_version = kwargs.pop('api_version', "2018-06-01") # type: str + + cls = kwargs.pop('cls', None) # type: ClsType["_models.GlobalParameterListResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + api_version=api_version, + template_url=self.list_by_factory.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + else: + + request = build_list_by_factory_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + api_version=api_version, + template_url=next_link, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("GlobalParameterListResponse", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters"} # type: ignore + + @distributed_trace + def get( + self, + resource_group_name: str, + factory_name: str, + global_parameter_name: str, + **kwargs: 
Any + ) -> "_models.GlobalParameterResource": + """Gets a Global parameter. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param global_parameter_name: The global parameter name. + :type global_parameter_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GlobalParameterResource, or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.GlobalParameterResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.GlobalParameterResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + api_version = kwargs.pop('api_version', "2018-06-01") # type: str + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + global_parameter_name=global_parameter_name, + api_version=api_version, + template_url=self.get.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('GlobalParameterResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}"} # type: ignore + + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + factory_name: str, + global_parameter_name: str, + default: "_models.GlobalParameterResource", + **kwargs: Any + ) -> "_models.GlobalParameterResource": + """Creates or updates a Global parameter. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param global_parameter_name: The global parameter name. + :type global_parameter_name: str + :param default: Global parameter resource definition. 
+ :type default: ~azure.mgmt.datafactory.models.GlobalParameterResource + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GlobalParameterResource, or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.GlobalParameterResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.GlobalParameterResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + api_version = kwargs.pop('api_version', "2018-06-01") # type: str + content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + + _json = self._serialize.body(default, 'GlobalParameterResource') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + global_parameter_name=global_parameter_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('GlobalParameterResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}"} # type: ignore + + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + factory_name: str, + global_parameter_name: str, + **kwargs: Any + ) -> None: + """Deletes a Global parameter. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param global_parameter_name: The global parameter name. 
+ :type global_parameter_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + api_version = kwargs.pop('api_version', "2018-06-01") # type: str + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + factory_name=factory_name, + global_parameter_name=global_parameter_name, + api_version=api_version, + template_url=self.delete.metadata['url'], + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/globalParameters/{globalParameterName}"} # type: ignore +
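A minimal usage sketch of the new GlobalParametersOperations group follows. It is illustrative only: it assumes the package built from this change is installed, that azure-identity is available for credentials, and that the subscription, resource group, and factory names are placeholders. The "default" resource name and the GlobalParameterSpecification payload shape follow the model definitions added in this patch, but the exact values are assumptions, not prescribed usage.

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.datafactory import DataFactoryManagementClient
    from azure.mgmt.datafactory.models import (
        GlobalParameterResource,
        GlobalParameterSpecification,
    )

    # Placeholder identifiers; substitute real values before running.
    SUBSCRIPTION_ID = "<subscription-id>"
    RESOURCE_GROUP = "<resource-group>"
    FACTORY = "<factory-name>"

    client = DataFactoryManagementClient(DefaultAzureCredential(), SUBSCRIPTION_ID)

    # Create or update the factory's global parameters resource (assumed here to be
    # named "default") with a single String-typed parameter.
    resource = GlobalParameterResource(
        properties={
            "environment": GlobalParameterSpecification(type="String", value="prod"),
        }
    )
    client.global_parameters.create_or_update(
        RESOURCE_GROUP, FACTORY, "default", resource
    )

    # Enumerate, fetch, and delete using the remaining operations in the group.
    for gp in client.global_parameters.list_by_factory(RESOURCE_GROUP, FACTORY):
        print(gp.name, list(gp.properties))

    fetched = client.global_parameters.get(RESOURCE_GROUP, FACTORY, "default")
    client.global_parameters.delete(RESOURCE_GROUP, FACTORY, "default")

Each call maps one-to-one to a request builder above (build_list_by_factory_request, build_get_request, build_create_or_update_request, build_delete_request); the async counterpart added under azure.mgmt.datafactory.aio exposes the same operation group.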