CodeGen from PR 17036 in Azure/azure-rest-api-specs
Add servicePrincipalCredentialType and servicePrincipalCredential into AzureBlobFSLinkedService (Azure#17036)

* Add servicePrincipalCredentialType and servicePrincipalCredential into AzureBlobFSLinkedService

* update

* revert some change
SDKAuto committed Jan 4, 2022
1 parent 86f6dc0 commit 5b4f8dc
Showing 55 changed files with 59,433 additions and 26,965 deletions.
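The headline change named in the commit title lands in the regenerated models, which are not shown in the excerpt below. A rough sketch of how the two new AzureBlobFSLinkedService settings might be used once regenerated; the snake_case parameter names, the SecureString wrapper, and the "ServicePrincipalKey" value are assumptions rather than lines taken from this diff:

from azure.mgmt.datafactory.models import (
    AzureBlobFSLinkedService,
    LinkedServiceResource,
    SecureString,
)

# Hypothetical usage of the newly added AzureBlobFSLinkedService properties.
properties = AzureBlobFSLinkedService(
    url="https://<account>.dfs.core.windows.net",  # placeholder ADLS Gen2 endpoint
    tenant="<tenant-id>",  # placeholder
    service_principal_id="<client-id>",  # placeholder
    service_principal_credential_type="ServicePrincipalKey",  # assumed enum value
    service_principal_credential=SecureString(value="<client-secret>"),  # assumed to accept a SecretBase
)
linked_service = LinkedServiceResource(properties=properties)

A resource built this way would then typically be passed to client.linked_services.create_or_update(resource_group_name, factory_name, linked_service_name, linked_service).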
10 changes: 5 additions & 5 deletions sdk/datafactory/azure-mgmt-datafactory/_meta.json
@@ -1,11 +1,11 @@
 {
-  "autorest": "3.4.5",
+  "autorest": "3.7.2",
   "use": [
-    "@autorest/python@5.8.4",
-    "@autorest/[email protected].2"
+    "@autorest/python@5.12.0",
+    "@autorest/[email protected].3"
   ],
-  "commit": "9e8a591da83285d863866ecd2f6fe87a72758a7d",
+  "commit": "949388b9dfddc435859fda2850c5bb5019aa217b",
   "repository_url": "https://github.com/Azure/azure-rest-api-specs",
-  "autorest_command": "autorest specification/datafactory/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --track2 --use=@autorest/python@5.8.4 --use=@autorest/[email protected].2 --version=3.4.5",
+  "autorest_command": "autorest specification/datafactory/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --track2 --use=@autorest/python@5.12.0 --use=@autorest/[email protected].3 --version=3.7.2",
   "readme": "specification/datafactory/resource-manager/readme.md"
 }
@@ -12,8 +12,7 @@
 __version__ = VERSION
 __all__ = ['DataFactoryManagementClient']
 
-try:
-    from ._patch import patch_sdk  # type: ignore
-    patch_sdk()
-except ImportError:
-    pass
+# `._patch.py` is used for handwritten extensions to the generated code
+# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
+from ._patch import patch_sdk
+patch_sdk()
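The new header comment above points at the handwritten-extension hook. A minimal sketch of what such a ._patch.py next to this __init__.py could contain; only the patch_sdk callable is required by the generated import, everything else is illustrative:

# _patch.py -- handwritten extensions to the generated package (sketch).
__all__ = []  # names to re-export from the package, if any


def patch_sdk():
    """Customize the generated SDK at import time (wrap clients, add helpers, ...)."""
    # Intentionally a no-op in this sketch; the generated __init__.py only
    # requires that this function exists and is callable.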
@@ -10,7 +10,7 @@
 
 from azure.core.configuration import Configuration
 from azure.core.pipeline import policies
-from azure.mgmt.core.policies import ARMHttpLoggingPolicy
+from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy
 
 from ._version import VERSION
 
@@ -40,11 +40,11 @@ def __init__(
         **kwargs  # type: Any
     ):
         # type: (...) -> None
+        super(DataFactoryManagementClientConfiguration, self).__init__(**kwargs)
         if credential is None:
             raise ValueError("Parameter 'credential' must not be None.")
         if subscription_id is None:
             raise ValueError("Parameter 'subscription_id' must not be None.")
-        super(DataFactoryManagementClientConfiguration, self).__init__(**kwargs)
 
         self.credential = credential
         self.subscription_id = subscription_id
@@ -68,4 +68,4 @@ def _configure(
         self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
         self.authentication_policy = kwargs.get('authentication_policy')
         if self.credential and not self.authentication_policy:
-            self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
+            self.authentication_policy = ARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs)
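The last hunk swaps the generic BearerTokenCredentialPolicy for ARMChallengeAuthenticationPolicy, which can also answer ARM claims challenges, and it is only installed when no authentication_policy keyword is supplied. A sketch of overriding it explicitly, assuming the keyword is forwarded from the client constructor into this configuration as in the generated code:

from azure.core.pipeline.policies import BearerTokenCredentialPolicy
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient

credential = DefaultAzureCredential()
client = DataFactoryManagementClient(
    credential=credential,
    subscription_id="<subscription-id>",  # placeholder
    # Supplying authentication_policy bypasses the ARMChallengeAuthenticationPolicy default.
    authentication_policy=BearerTokenCredentialPolicy(
        credential, "https://management.azure.com/.default"
    ),
)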
@@ -6,41 +6,22 @@
 # Changes may cause incorrect behavior and will be lost if the code is regenerated.
 # --------------------------------------------------------------------------
 
+from copy import deepcopy
 from typing import TYPE_CHECKING
 
 from azure.mgmt.core import ARMPipelineClient
 from msrest import Deserializer, Serializer
 
+from . import models
+from ._configuration import DataFactoryManagementClientConfiguration
+from .operations import ActivityRunsOperations, DataFlowDebugSessionOperations, DataFlowsOperations, DatasetsOperations, ExposureControlOperations, FactoriesOperations, IntegrationRuntimeNodesOperations, IntegrationRuntimeObjectMetadataOperations, IntegrationRuntimesOperations, LinkedServicesOperations, ManagedPrivateEndpointsOperations, ManagedVirtualNetworksOperations, Operations, PipelineRunsOperations, PipelinesOperations, PrivateEndPointConnectionsOperations, PrivateEndpointConnectionOperations, PrivateLinkResourcesOperations, TriggerRunsOperations, TriggersOperations
 
 if TYPE_CHECKING:
     # pylint: disable=unused-import,ungrouped-imports
     from typing import Any, Optional
 
     from azure.core.credentials import TokenCredential
-    from azure.core.pipeline.transport import HttpRequest, HttpResponse
-
-from ._configuration import DataFactoryManagementClientConfiguration
-from .operations import Operations
-from .operations import FactoriesOperations
-from .operations import ExposureControlOperations
-from .operations import IntegrationRuntimesOperations
-from .operations import IntegrationRuntimeObjectMetadataOperations
-from .operations import IntegrationRuntimeNodesOperations
-from .operations import LinkedServicesOperations
-from .operations import DatasetsOperations
-from .operations import PipelinesOperations
-from .operations import PipelineRunsOperations
-from .operations import ActivityRunsOperations
-from .operations import TriggersOperations
-from .operations import TriggerRunsOperations
-from .operations import DataFlowsOperations
-from .operations import DataFlowDebugSessionOperations
-from .operations import ManagedVirtualNetworksOperations
-from .operations import ManagedPrivateEndpointsOperations
-from .operations import PrivateEndPointConnectionsOperations
-from .operations import PrivateEndpointConnectionOperations
-from .operations import PrivateLinkResourcesOperations
-from . import models
 
+from azure.core.rest import HttpRequest, HttpResponse
 
 class DataFactoryManagementClient(object):
     """The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services.
@@ -53,10 +34,13 @@ class DataFactoryManagementClient(object):
     :vartype exposure_control: azure.mgmt.datafactory.operations.ExposureControlOperations
     :ivar integration_runtimes: IntegrationRuntimesOperations operations
     :vartype integration_runtimes: azure.mgmt.datafactory.operations.IntegrationRuntimesOperations
-    :ivar integration_runtime_object_metadata: IntegrationRuntimeObjectMetadataOperations operations
-    :vartype integration_runtime_object_metadata: azure.mgmt.datafactory.operations.IntegrationRuntimeObjectMetadataOperations
+    :ivar integration_runtime_object_metadata: IntegrationRuntimeObjectMetadataOperations
+     operations
+    :vartype integration_runtime_object_metadata:
+     azure.mgmt.datafactory.operations.IntegrationRuntimeObjectMetadataOperations
     :ivar integration_runtime_nodes: IntegrationRuntimeNodesOperations operations
-    :vartype integration_runtime_nodes: azure.mgmt.datafactory.operations.IntegrationRuntimeNodesOperations
+    :vartype integration_runtime_nodes:
+     azure.mgmt.datafactory.operations.IntegrationRuntimeNodesOperations
     :ivar linked_services: LinkedServicesOperations operations
     :vartype linked_services: azure.mgmt.datafactory.operations.LinkedServicesOperations
     :ivar datasets: DatasetsOperations operations
@@ -74,101 +58,96 @@ class DataFactoryManagementClient(object):
     :ivar data_flows: DataFlowsOperations operations
     :vartype data_flows: azure.mgmt.datafactory.operations.DataFlowsOperations
     :ivar data_flow_debug_session: DataFlowDebugSessionOperations operations
-    :vartype data_flow_debug_session: azure.mgmt.datafactory.operations.DataFlowDebugSessionOperations
+    :vartype data_flow_debug_session:
+     azure.mgmt.datafactory.operations.DataFlowDebugSessionOperations
     :ivar managed_virtual_networks: ManagedVirtualNetworksOperations operations
-    :vartype managed_virtual_networks: azure.mgmt.datafactory.operations.ManagedVirtualNetworksOperations
+    :vartype managed_virtual_networks:
+     azure.mgmt.datafactory.operations.ManagedVirtualNetworksOperations
     :ivar managed_private_endpoints: ManagedPrivateEndpointsOperations operations
-    :vartype managed_private_endpoints: azure.mgmt.datafactory.operations.ManagedPrivateEndpointsOperations
+    :vartype managed_private_endpoints:
+     azure.mgmt.datafactory.operations.ManagedPrivateEndpointsOperations
     :ivar private_end_point_connections: PrivateEndPointConnectionsOperations operations
-    :vartype private_end_point_connections: azure.mgmt.datafactory.operations.PrivateEndPointConnectionsOperations
+    :vartype private_end_point_connections:
+     azure.mgmt.datafactory.operations.PrivateEndPointConnectionsOperations
    :ivar private_endpoint_connection: PrivateEndpointConnectionOperations operations
-    :vartype private_endpoint_connection: azure.mgmt.datafactory.operations.PrivateEndpointConnectionOperations
+    :vartype private_endpoint_connection:
+     azure.mgmt.datafactory.operations.PrivateEndpointConnectionOperations
     :ivar private_link_resources: PrivateLinkResourcesOperations operations
-    :vartype private_link_resources: azure.mgmt.datafactory.operations.PrivateLinkResourcesOperations
+    :vartype private_link_resources:
+     azure.mgmt.datafactory.operations.PrivateLinkResourcesOperations
     :param credential: Credential needed for the client to connect to Azure.
     :type credential: ~azure.core.credentials.TokenCredential
    :param subscription_id: The subscription identifier.
     :type subscription_id: str
-    :param str base_url: Service URL
-    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+    :param base_url: Service URL. Default value is 'https://management.azure.com'.
+    :type base_url: str
+    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+     Retry-After header is present.
     """
 
     def __init__(
         self,
         credential,  # type: "TokenCredential"
         subscription_id,  # type: str
-        base_url=None,  # type: Optional[str]
+        base_url="https://management.azure.com",  # type: str
         **kwargs  # type: Any
     ):
         # type: (...) -> None
-        if not base_url:
-            base_url = 'https://management.azure.com'
-        self._config = DataFactoryManagementClientConfiguration(credential, subscription_id, **kwargs)
+        self._config = DataFactoryManagementClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs)
         self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
 
         client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
         self._serialize = Serializer(client_models)
-        self._serialize.client_side_validation = False
         self._deserialize = Deserializer(client_models)
-
-        self.operations = Operations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.factories = FactoriesOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.exposure_control = ExposureControlOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.integration_runtimes = IntegrationRuntimesOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.integration_runtime_object_metadata = IntegrationRuntimeObjectMetadataOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.integration_runtime_nodes = IntegrationRuntimeNodesOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.linked_services = LinkedServicesOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.datasets = DatasetsOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.pipelines = PipelinesOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.pipeline_runs = PipelineRunsOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.activity_runs = ActivityRunsOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.triggers = TriggersOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.trigger_runs = TriggerRunsOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.data_flows = DataFlowsOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.data_flow_debug_session = DataFlowDebugSessionOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.managed_virtual_networks = ManagedVirtualNetworksOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.managed_private_endpoints = ManagedPrivateEndpointsOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.private_end_point_connections = PrivateEndPointConnectionsOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.private_endpoint_connection = PrivateEndpointConnectionOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-        self.private_link_resources = PrivateLinkResourcesOperations(
-            self._client, self._config, self._serialize, self._deserialize)
-
-    def _send_request(self, http_request, **kwargs):
-        # type: (HttpRequest, Any) -> HttpResponse
+        self._serialize.client_side_validation = False
+        self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
+        self.factories = FactoriesOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.exposure_control = ExposureControlOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.integration_runtimes = IntegrationRuntimesOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.integration_runtime_object_metadata = IntegrationRuntimeObjectMetadataOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.integration_runtime_nodes = IntegrationRuntimeNodesOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.linked_services = LinkedServicesOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.datasets = DatasetsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.pipelines = PipelinesOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.pipeline_runs = PipelineRunsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.activity_runs = ActivityRunsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.triggers = TriggersOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.trigger_runs = TriggerRunsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.data_flows = DataFlowsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.data_flow_debug_session = DataFlowDebugSessionOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.managed_virtual_networks = ManagedVirtualNetworksOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.managed_private_endpoints = ManagedPrivateEndpointsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.private_end_point_connections = PrivateEndPointConnectionsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.private_endpoint_connection = PrivateEndpointConnectionOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.private_link_resources = PrivateLinkResourcesOperations(self._client, self._config, self._serialize, self._deserialize)
+
+
+    def _send_request(
+        self,
+        request,  # type: HttpRequest
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> HttpResponse
         """Runs the network request through the client's chained policies.
-        :param http_request: The network request you want to make. Required.
-        :type http_request: ~azure.core.pipeline.transport.HttpRequest
-        :keyword bool stream: Whether the response payload will be streamed. Defaults to True.
+        >>> from azure.core.rest import HttpRequest
+        >>> request = HttpRequest("GET", "https://www.example.org/")
+        <HttpRequest [GET], url: 'https://www.example.org/'>
+        >>> response = client._send_request(request)
+        <HttpResponse: 200 OK>
+        For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart
+        :param request: The network request you want to make. Required.
+        :type request: ~azure.core.rest.HttpRequest
+        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
         :return: The response of your network call. Does not do error handling on your response.
-        :rtype: ~azure.core.pipeline.transport.HttpResponse
+        :rtype: ~azure.core.rest.HttpResponse
         """
-        path_format_arguments = {
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-        }
-        http_request.url = self._client.format_url(http_request.url, **path_format_arguments)
-        stream = kwargs.pop("stream", True)
-        pipeline_response = self._client._pipeline.run(http_request, stream=stream, **kwargs)
-        return pipeline_response.http_response
+
+        request_copy = deepcopy(request)
+        request_copy.url = self._client.format_url(request_copy.url)
+        return self._client.send_request(request_copy, **kwargs)
 
     def close(self):
         # type: () -> None
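For orientation, a minimal sketch of how the regenerated client is constructed and exercised; DefaultAzureCredential, the placeholder subscription id, and the factories.list() call are illustrative, while the HttpRequest round trip mirrors the example embedded in the new _send_request docstring:

from azure.core.rest import HttpRequest
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient

client = DataFactoryManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",  # placeholder
)

# Typed operation groups are attached in __init__ above.
for factory in client.factories.list():
    print(factory.name)

# Protocol-level path, as in the docstring example: build an
# azure.core.rest.HttpRequest and hand it to _send_request.
request = HttpRequest("GET", "https://www.example.org/")
response = client._send_request(request)
print(response.status_code)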